prompt: large_string, lengths 70 to 991k
completion: large_string, lengths 0 to 1.02k
<|file_name|>f10-read-timeout.rs<|end_file_name|><|fim▁begin|>/// Figure 10.10: Calling read with a timeout
///
/// Takeaway: First I tried with the regular `signal` function of libc
/// only to find out that the alarm signal does not interrupt the read
/// call. Digging into the C code it became obvious that the signal function
/// gets overridden by `lib/signal.c`, which is a "reliable version of signal(),
/// using POSIX sigaction()". But this function gets only introduced in
/// Figure 10.18. This was quite misleading IMO.
///
/// $ f10-read-timeout 2>&1
/// read error!
/// ERROR: return code 1

extern crate libc;
#[macro_use(as_void)]
extern crate apue;

use libc::{STDOUT_FILENO, STDIN_FILENO, SIGALRM, SIG_ERR, c_int};
use libc::{alarm, write, read, exit};
use apue::signal;

const MAXLINE: usize = 4096;

<|fim▁hole|>
    // nothing to do, just return to interrupt the read
}

fn main() {
    unsafe {
        let line: [u8; MAXLINE] = std::mem::uninitialized();
        if signal(SIGALRM, sig_alrm) == SIG_ERR {
            panic!("signal(SIGALRM) error");
        }
        alarm(1);
        let n = read(STDIN_FILENO, as_void!(line), MAXLINE);
        if n < 0 {
            println!("read error!");
            exit(1);
        }
        alarm(0);
        write(STDOUT_FILENO, as_void!(line), n as _);
    }
}<|fim▁end|>
fn sig_alrm(_: c_int) {
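The takeaway in the comment above has a close analogue outside C and Rust: a handler that simply returns will usually not break a blocking read out of its restart/retry loop. The following is only a rough illustration, not part of the original sample; it assumes Python 3.5+, where PEP 475 retries os.read after a handler returns, so the one-second timeout is surfaced by raising from the handler instead.

import os
import signal
import sys

MAXLINE = 4096

def sig_alrm(signum, frame):
    # Merely returning would let os.read() be retried after EINTR (PEP 475),
    # so the timeout is surfaced by raising an exception instead.
    raise TimeoutError

signal.signal(signal.SIGALRM, sig_alrm)
signal.alarm(1)                          # deliver SIGALRM after one second
try:
    line = os.read(sys.stdin.fileno(), MAXLINE)
except TimeoutError:
    print("read error!")
    sys.exit(1)
signal.alarm(0)                          # cancel the pending alarm
os.write(sys.stdout.fileno(), line)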
<|file_name|>Definition.js<|end_file_name|><|fim▁begin|>export default class Definition { get module() { return this._module; } set module(value) { this._module = value; }<|fim▁hole|> get classArguments() { return this._arguments || []; } set classArguments(value) { this._arguments = value; } get tags() { return this._tags || []; } set tags(value) { this._tags = value; } constructor(module = '', classArguments = [], tags = []) { this.module = module; this.classArguments = classArguments; this.tags = tags; } setModule(module) { this.module = module; } addArgument(argument) { this.classArguments.push(argument); return this; } setArgument(index, argument) { this.classArguments[index] = argument; return this; } removeArgument(index) { if (undefined !== this.classArguments[index]) { delete this.classArguments[index]; } return this; } setArguments(classArguments) { this.classArguments = classArguments; } setTags(tags) { this.tags = tags; } getTags() { return this.tags; } addTag(name) { this.tags.push(name); } }<|fim▁end|>
<|file_name|>OAuthJwtAccessTokenValidatorTest.java<|end_file_name|><|fim▁begin|>/* * Copyright 2020 Yahoo Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.yahoo.athenz.auth.oauth.validator; import static org.testng.Assert.*; import java.io.FileInputStream; import java.security.cert.CertificateEncodingException; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import com.yahoo.athenz.auth.oauth.token.OAuthJwtAccessToken; import com.yahoo.athenz.auth.oauth.token.OAuthJwtAccessTokenException; import com.yahoo.athenz.auth.util.CryptoException; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; public class OAuthJwtAccessTokenValidatorTest { private final ClassLoader classLoader = this.getClass().getClassLoader(); private OAuthJwtAccessTokenValidator baseValidator = null; private final X509Certificate readCert(String resourceName) throws Exception { try (FileInputStream certIs = new FileInputStream(this.classLoader.getResource(resourceName).getFile())) { CertificateFactory cf = CertificateFactory.getInstance("X.509"); return (X509Certificate) cf.generateCertificate(certIs); } } @BeforeMethod public void initialize() throws Exception { this.baseValidator = new OAuthJwtAccessTokenValidator() { public void validate(OAuthJwtAccessToken jwt) throws OAuthJwtAccessTokenException {} public void validateClientId(OAuthJwtAccessToken jwt, String clientId) throws OAuthJwtAccessTokenException {} public void validateCertificateBinding(OAuthJwtAccessToken jwt, String certificateThumbprint) throws OAuthJwtAccessTokenException {} }; } @Test public void testGetX509CertificateCommonName() throws Exception { // on null assertThrows(NullPointerException.class, () -> this.baseValidator.getX509CertificateCommonName(null)); X509Certificate cert = this.readCert("jwt_ui.athenz.io.pem"); assertEquals(this.baseValidator.getX509CertificateCommonName(cert), "ui.athenz.io"); } @Test public void testGetX509CertificateThumbprint() throws Exception { // on null assertThrows(NullPointerException.class, () -> this.baseValidator.getX509CertificateThumbprint(null)); X509Certificate cert = this.readCert("jwt_ui.athenz.io.pem"); assertEquals(this.baseValidator.getX509CertificateThumbprint(cert), "zlkxyoX95le-Nv7OI0BxcjTOogvy9PGH-v_CBr_DsEk"); } @Test public void testValidateCertificateBinding() throws Exception { final OAuthJwtAccessTokenValidator mock = Mockito.mock(OAuthJwtAccessTokenValidator.class, Mockito.CALLS_REAL_METHODS); // on CertificateEncodingException<|fim▁hole|> // on CryptoException Mockito.doThrow(new CryptoException()).when(mock).getX509CertificateThumbprint(null); assertThrows(OAuthJwtAccessTokenException.class, () -> mock.validateCertificateBinding(null, (X509Certificate) null)); // actual call OAuthJwtAccessTokenValidator validator = Mockito.mock(OAuthJwtAccessTokenValidator.class, Mockito.CALLS_REAL_METHODS); X509Certificate cert = this.readCert("jwt_ui.athenz.io.pem"); 
Mockito.doReturn("zlkxyoX95le-Nv7OI0BxcjTOogvy9PGH-v_CBr_DsEk").when(validator).getX509CertificateThumbprint(cert); ArgumentCaptor<OAuthJwtAccessToken> tokenArg = ArgumentCaptor.forClass(OAuthJwtAccessToken.class); ArgumentCaptor<String> thumbprintArg = ArgumentCaptor.forClass(String.class); validator.validateCertificateBinding(null, cert); Mockito.verify(validator, Mockito.times(1)).validateCertificateBinding(tokenArg.capture(), thumbprintArg.capture()); assertNull(tokenArg.getValue()); assertEquals(thumbprintArg.getValue(), "zlkxyoX95le-Nv7OI0BxcjTOogvy9PGH-v_CBr_DsEk"); } }<|fim▁end|>
Mockito.doThrow(new CertificateEncodingException()).when(mock).getX509CertificateThumbprint(null); assertThrows(OAuthJwtAccessTokenException.class, () -> mock.validateCertificateBinding(null, (X509Certificate) null));
<|file_name|>SparkArgsExtractor.java<|end_file_name|><|fim▁begin|>/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oozie.action.hadoop; import com.google.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.apache.commons.lang.StringUtils; import org.apache.directory.api.util.Strings; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.Path; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.StringReader; import java.io.Writer; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.regex.Pattern; import static org.apache.oozie.action.hadoop.SparkActionExecutor.SPARK_DEFAULT_OPTS; @SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "Properties file should be specified by user") class SparkArgsExtractor { private static final Pattern SPARK_DEFAULTS_FILE_PATTERN = Pattern.compile("spark-defaults.conf"); private static final String FILES_OPTION = "--files"; private static final String ARCHIVES_OPTION = "--archives"; private static final String LOG4J_CONFIGURATION_JAVA_OPTION = "-Dlog4j.configuration="; private static final String SECURITY_TOKENS_HADOOPFS = "spark.yarn.security.tokens.hadoopfs.enabled"; private static final String SECURITY_TOKENS_HIVE = "spark.yarn.security.tokens.hive.enabled"; private static final String SECURITY_TOKENS_HBASE = "spark.yarn.security.tokens.hbase.enabled"; private static final String SECURITY_CREDENTIALS_HADOOPFS = "spark.yarn.security.credentials.hadoopfs.enabled"; private static final String SECURITY_CREDENTIALS_HIVE = "spark.yarn.security.credentials.hive.enabled"; private static final String SECURITY_CREDENTIALS_HBASE = "spark.yarn.security.credentials.hbase.enabled"; private static final String PWD = "$PWD" + File.separator + "*"; private static final String MASTER_OPTION = "--master"; private static final String MODE_OPTION = "--deploy-mode"; private static final String JOB_NAME_OPTION = "--name"; private static final String CLASS_NAME_OPTION = "--class"; private static final String VERBOSE_OPTION = "--verbose"; private static final String DRIVER_CLASSPATH_OPTION = "--driver-class-path"; private static final String EXECUTOR_CLASSPATH = "spark.executor.extraClassPath="; private static final String DRIVER_CLASSPATH = "spark.driver.extraClassPath="; private static final String 
EXECUTOR_EXTRA_JAVA_OPTIONS = "spark.executor.extraJavaOptions="; private static final String DRIVER_EXTRA_JAVA_OPTIONS = "spark.driver.extraJavaOptions="; private static final Pattern SPARK_VERSION_1 = Pattern.compile("^1.*"); private static final String SPARK_YARN_JAR = "spark.yarn.jar"; private static final String SPARK_YARN_JARS = "spark.yarn.jars"; private static final String OPT_SEPARATOR = "="; private static final String OPT_VALUE_SEPARATOR = ","; private static final String CONF_OPTION = "--conf"; private static final String MASTER_OPTION_YARN_CLUSTER = "yarn-cluster"; private static final String MASTER_OPTION_YARN_CLIENT = "yarn-client"; private static final String MASTER_OPTION_YARN = "yarn"; private static final String DEPLOY_MODE_CLUSTER = "cluster"; private static final String DEPLOY_MODE_CLIENT = "client"; private static final String SPARK_YARN_TAGS = "spark.yarn.tags"; private static final String OPT_PROPERTIES_FILE = "--properties-file"; public static final String SPARK_DEFAULTS_GENERATED_PROPERTIES = "spark-defaults-oozie-generated.properties"; private boolean pySpark = false; private final Configuration actionConf; SparkArgsExtractor(final Configuration actionConf) { this.actionConf = actionConf; } boolean isPySpark() { return pySpark; } List<String> extract(final String[] mainArgs) throws OozieActionConfiguratorException, IOException, URISyntaxException { final List<String> sparkArgs = new ArrayList<>(); sparkArgs.add(MASTER_OPTION); final String master = actionConf.get(SparkActionExecutor.SPARK_MASTER); sparkArgs.add(master); // In local mode, everything runs here in the Launcher Job. // In yarn-client mode, the driver runs here in the Launcher Job and the // executor in Yarn. // In yarn-cluster mode, the driver and executor run in Yarn. 
final String sparkDeployMode = actionConf.get(SparkActionExecutor.SPARK_MODE); if (sparkDeployMode != null) { sparkArgs.add(MODE_OPTION); sparkArgs.add(sparkDeployMode); } final boolean yarnClusterMode = master.equals(MASTER_OPTION_YARN_CLUSTER) || (master.equals(MASTER_OPTION_YARN) && sparkDeployMode != null && sparkDeployMode.equals(DEPLOY_MODE_CLUSTER)); final boolean yarnClientMode = master.equals(MASTER_OPTION_YARN_CLIENT) || (master.equals(MASTER_OPTION_YARN) && sparkDeployMode != null && sparkDeployMode.equals(DEPLOY_MODE_CLIENT)); sparkArgs.add(JOB_NAME_OPTION); sparkArgs.add(actionConf.get(SparkActionExecutor.SPARK_JOB_NAME)); final String className = actionConf.get(SparkActionExecutor.SPARK_CLASS); if (className != null) { sparkArgs.add(CLASS_NAME_OPTION); sparkArgs.add(className); } appendOoziePropertiesToSparkConf(sparkArgs); String jarPath = actionConf.get(SparkActionExecutor.SPARK_JAR); if (jarPath != null && jarPath.endsWith(".py")) { pySpark = true; } boolean addedSecurityTokensHadoopFS = false; boolean addedSecurityTokensHive = false; boolean addedSecurityTokensHBase = false; boolean addedSecurityCredentialsHadoopFS = false; boolean addedSecurityCredentialsHive = false; boolean addedSecurityCredentialsHBase = false; boolean addedLog4jDriverSettings = false; boolean addedLog4jExecutorSettings = false; final StringBuilder driverClassPath = new StringBuilder(); final StringBuilder executorClassPath = new StringBuilder(); final StringBuilder userFiles = new StringBuilder(); final StringBuilder userArchives = new StringBuilder(); final String sparkOpts = actionConf.get(SparkActionExecutor.SPARK_OPTS); String propertiesFile = null; if (StringUtils.isNotEmpty(sparkOpts)) { final List<String> sparkOptions = SparkOptionsSplitter.splitSparkOpts(sparkOpts); for (int i = 0; i < sparkOptions.size(); i++) { String opt = sparkOptions.get(i); boolean addToSparkArgs = true; if (yarnClusterMode || yarnClientMode) { if (opt.startsWith(EXECUTOR_CLASSPATH)) { appendWithPathSeparator(opt.substring(EXECUTOR_CLASSPATH.length()), executorClassPath); addToSparkArgs = false; } if (opt.startsWith(DRIVER_CLASSPATH)) { appendWithPathSeparator(opt.substring(DRIVER_CLASSPATH.length()), driverClassPath); addToSparkArgs = false; } if (opt.equals(DRIVER_CLASSPATH_OPTION)) { // we need the next element after this option appendWithPathSeparator(sparkOptions.get(i + 1), driverClassPath); // increase i to skip the next element. i++; addToSparkArgs = false; } } if (opt.startsWith(SECURITY_TOKENS_HADOOPFS)) { addedSecurityTokensHadoopFS = true; } if (opt.startsWith(SECURITY_TOKENS_HIVE)) { addedSecurityTokensHive = true; } if (opt.startsWith(SECURITY_TOKENS_HBASE)) { addedSecurityTokensHBase = true; } if (opt.startsWith(SECURITY_CREDENTIALS_HADOOPFS)) { addedSecurityCredentialsHadoopFS = true; } if (opt.startsWith(SECURITY_CREDENTIALS_HIVE)) { addedSecurityCredentialsHive = true; } if (opt.startsWith(SECURITY_CREDENTIALS_HBASE)) { addedSecurityCredentialsHBase = true; } if (opt.startsWith(OPT_PROPERTIES_FILE)){ i++; propertiesFile = sparkOptions.get(i); addToSparkArgs = false; } if (opt.startsWith(EXECUTOR_EXTRA_JAVA_OPTIONS) || opt.startsWith(DRIVER_EXTRA_JAVA_OPTIONS)) { if (!opt.contains(LOG4J_CONFIGURATION_JAVA_OPTION)) { opt += " " + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS; } else { System.out.println("Warning: Spark Log4J settings are overwritten." 
+ " Child job IDs may not be available"); } if (opt.startsWith(EXECUTOR_EXTRA_JAVA_OPTIONS)) { addedLog4jExecutorSettings = true; } else { addedLog4jDriverSettings = true; } } if (opt.startsWith(FILES_OPTION)) { final String userFile; if (opt.contains(OPT_SEPARATOR)) { userFile = opt.substring(opt.indexOf(OPT_SEPARATOR) + OPT_SEPARATOR.length()); } else { userFile = sparkOptions.get(i + 1); i++; } if (userFiles.length() > 0) { userFiles.append(OPT_VALUE_SEPARATOR); } userFiles.append(userFile); addToSparkArgs = false; } if (opt.startsWith(ARCHIVES_OPTION)) { final String userArchive; if (opt.contains(OPT_SEPARATOR)) { userArchive = opt.substring(opt.indexOf(OPT_SEPARATOR) + OPT_SEPARATOR.length()); } else { userArchive = sparkOptions.get(i + 1); i++; } if (userArchives.length() > 0) { userArchives.append(OPT_VALUE_SEPARATOR); } userArchives.append(userArchive); addToSparkArgs = false; } if (addToSparkArgs) { sparkArgs.add(opt); } else if (sparkArgs.get(sparkArgs.size() - 1).equals(CONF_OPTION)) { sparkArgs.remove(sparkArgs.size() - 1); } } } if ((yarnClusterMode || yarnClientMode)) { // Include the current working directory (of executor container) // in executor classpath, because it will contain localized // files appendWithPathSeparator(PWD, executorClassPath); appendWithPathSeparator(PWD, driverClassPath); sparkArgs.add(CONF_OPTION); sparkArgs.add(EXECUTOR_CLASSPATH + executorClassPath.toString()); sparkArgs.add(CONF_OPTION); sparkArgs.add(DRIVER_CLASSPATH + driverClassPath.toString()); } if (actionConf.get(LauncherMain.MAPREDUCE_JOB_TAGS) != null) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SPARK_YARN_TAGS + OPT_SEPARATOR + actionConf.get(LauncherMain.MAPREDUCE_JOB_TAGS)); } if (!addedSecurityTokensHadoopFS) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_TOKENS_HADOOPFS + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityTokensHive) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_TOKENS_HIVE + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityTokensHBase) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_TOKENS_HBASE + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityCredentialsHadoopFS) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_CREDENTIALS_HADOOPFS + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityCredentialsHive) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_CREDENTIALS_HIVE + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedSecurityCredentialsHBase) { sparkArgs.add(CONF_OPTION); sparkArgs.add(SECURITY_CREDENTIALS_HBASE + OPT_SEPARATOR + Boolean.toString(false)); } if (!addedLog4jExecutorSettings) { sparkArgs.add(CONF_OPTION); sparkArgs.add(EXECUTOR_EXTRA_JAVA_OPTIONS + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS); }<|fim▁hole|> if (!addedLog4jDriverSettings) { sparkArgs.add(CONF_OPTION); sparkArgs.add(DRIVER_EXTRA_JAVA_OPTIONS + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS); } mergeAndAddPropertiesFile(sparkArgs, propertiesFile); if ((yarnClusterMode || yarnClientMode)) { final Map<String, URI> fixedFileUrisMap = SparkMain.fixFsDefaultUrisAndFilterDuplicates(DistributedCache.getCacheFiles(actionConf)); fixedFileUrisMap.put(SparkMain.SPARK_LOG4J_PROPS, new Path(SparkMain.SPARK_LOG4J_PROPS).toUri()); fixedFileUrisMap.put(SparkMain.HIVE_SITE_CONF, new Path(SparkMain.HIVE_SITE_CONF).toUri()); addUserDefined(userFiles.toString(), fixedFileUrisMap); final Collection<URI> fixedFileUris = fixedFileUrisMap.values(); final JarFilter jarFilter = new 
JarFilter(fixedFileUris, jarPath); jarFilter.filter(); jarPath = jarFilter.getApplicationJar(); final String cachedFiles = StringUtils.join(fixedFileUris, OPT_VALUE_SEPARATOR); if (cachedFiles != null && !cachedFiles.isEmpty()) { sparkArgs.add(FILES_OPTION); sparkArgs.add(cachedFiles); } final Map<String, URI> fixedArchiveUrisMap = SparkMain.fixFsDefaultUrisAndFilterDuplicates(DistributedCache. getCacheArchives(actionConf)); addUserDefined(userArchives.toString(), fixedArchiveUrisMap); final String cachedArchives = StringUtils.join(fixedArchiveUrisMap.values(), OPT_VALUE_SEPARATOR); if (cachedArchives != null && !cachedArchives.isEmpty()) { sparkArgs.add(ARCHIVES_OPTION); sparkArgs.add(cachedArchives); } setSparkYarnJarsConf(sparkArgs, jarFilter.getSparkYarnJar(), jarFilter.getSparkVersion()); } if (!sparkArgs.contains(VERBOSE_OPTION)) { sparkArgs.add(VERBOSE_OPTION); } sparkArgs.add(jarPath); sparkArgs.addAll(Arrays.asList(mainArgs)); return sparkArgs; } private void mergeAndAddPropertiesFile(final List<String> sparkArgs, final String userDefinedPropertiesFile) throws IOException { final Properties properties = new Properties(); loadServerDefaultProperties(properties); loadLocalizedDefaultPropertiesFile(properties); loadUserDefinedPropertiesFile(userDefinedPropertiesFile, properties); final boolean persisted = persistMergedProperties(properties); if (persisted) { sparkArgs.add(OPT_PROPERTIES_FILE); sparkArgs.add(SPARK_DEFAULTS_GENERATED_PROPERTIES); } } private boolean persistMergedProperties(final Properties properties) throws IOException { if (!properties.isEmpty()) { try (final Writer writer = new OutputStreamWriter( new FileOutputStream(new File(SPARK_DEFAULTS_GENERATED_PROPERTIES)), StandardCharsets.UTF_8.name())) { properties.store(writer, "Properties file generated by Oozie"); System.out.println(String.format("Persisted merged Spark configs in file %s. Merged properties are: %s", SPARK_DEFAULTS_GENERATED_PROPERTIES, Arrays.toString(properties.stringPropertyNames().toArray()))); return true; } catch (IOException e) { System.err.println(String.format("Could not persist derived Spark config file. Reason: %s", e.getMessage())); throw e; } } return false; } private void loadUserDefinedPropertiesFile(final String userDefinedPropertiesFile, final Properties properties) { if (userDefinedPropertiesFile != null) { System.out.println(String.format("Reading Spark config from %s %s...", OPT_PROPERTIES_FILE, userDefinedPropertiesFile)); loadProperties(new File(userDefinedPropertiesFile), properties); } } private void loadLocalizedDefaultPropertiesFile(final Properties properties) { final File localizedDefaultConfFile = SparkMain.getMatchingFile(SPARK_DEFAULTS_FILE_PATTERN); if (localizedDefaultConfFile != null) { System.out.println(String.format("Reading Spark config from file %s...", localizedDefaultConfFile.getName())); loadProperties(localizedDefaultConfFile, properties); } } private void loadServerDefaultProperties(final Properties properties) { final String sparkDefaultsFromServer = actionConf.get(SPARK_DEFAULT_OPTS, ""); if (!sparkDefaultsFromServer.isEmpty()) { System.out.println("Reading Spark config propagated from Oozie server..."); try (final StringReader reader = new StringReader(sparkDefaultsFromServer)) { properties.load(reader); } catch (IOException e) { System.err.println(String.format("Could not read propagated Spark config! 
Reason: %s", e.getMessage())); } } } private void loadProperties(final File file, final Properties target) { try (final Reader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8.name())) { final Properties properties = new Properties(); properties.load(reader); for(String key :properties.stringPropertyNames()) { Object prevProperty = target.setProperty(key, properties.getProperty(key)); if(prevProperty != null){ System.out.println(String.format("Value of %s was overwritten from %s", key, file.getName())); } } } catch (IOException e) { System.err.println(String.format("Could not read Spark configs from file %s. Reason: %s", file.getName(), e.getMessage())); } } private void appendWithPathSeparator(final String what, final StringBuilder to) { if (to.length() > 0) { to.append(File.pathSeparator); } to.append(what); } private void addUserDefined(final String userList, final Map<String, URI> urisMap) { if (userList != null) { for (final String file : userList.split(OPT_VALUE_SEPARATOR)) { if (!Strings.isEmpty(file)) { final Path p = new Path(file); urisMap.put(p.getName(), p.toUri()); } } } } /* * Get properties that needs to be passed to Spark as Spark configuration from actionConf. */ @VisibleForTesting void appendOoziePropertiesToSparkConf(final List<String> sparkArgs) { for (final Map.Entry<String, String> oozieConfig : actionConf .getValByRegex("^oozie\\.(?!launcher|spark).+").entrySet()) { sparkArgs.add(CONF_OPTION); sparkArgs.add(String.format("spark.%s=%s", oozieConfig.getKey(), oozieConfig.getValue())); } } /** * Sets spark.yarn.jars for Spark 2.X. Sets spark.yarn.jar for Spark 1.X. * * @param sparkArgs * @param sparkYarnJar * @param sparkVersion */ private void setSparkYarnJarsConf(final List<String> sparkArgs, final String sparkYarnJar, final String sparkVersion) { if (SPARK_VERSION_1.matcher(sparkVersion).find()) { // In Spark 1.X.X, set spark.yarn.jar to avoid // multiple distribution sparkArgs.add(CONF_OPTION); sparkArgs.add(SPARK_YARN_JAR + OPT_SEPARATOR + sparkYarnJar); } else { // In Spark 2.X.X, set spark.yarn.jars sparkArgs.add(CONF_OPTION); sparkArgs.add(SPARK_YARN_JARS + OPT_SEPARATOR + sparkYarnJar); } } }<|fim▁end|>
<|file_name|>S12.10_A1.4_T4.js<|end_file_name|><|fim▁begin|>// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /** * @name: S12.10_A1.4_T4; * @section: 12.10; * @assertion: The with statement adds a computed object to the front of the * scope chain of the current execution context; * @description: Using "with" statement within iteration statement, leading to completion by break; * @strict_mode_negative */ this.p1 = 1; this.p2 = 2; this.p3 = 3; var result = "result"; var myObj = {p1: 'a', p2: 'b', p3: 'c', value: 'myObj_value', valueOf : function(){return 'obj_valueOf';}, parseInt : function(){return 'obj_parseInt';}, NaN : 'obj_NaN', Infinity : 'obj_Infinity', eval : function(){return 'obj_eval';}, parseFloat : function(){return 'obj_parseFloat';}, isNaN : function(){return 'obj_isNaN';}, isFinite : function(){return 'obj_isFinite';} } var del; var st_p1 = "p1"; var st_p2 = "p2"; var st_p3 = "p3"; var st_parseInt = "parseInt"; var st_NaN = "NaN"; var st_Infinity = "Infinity"; var st_eval = "eval"; var st_parseFloat = "parseFloat"; var st_isNaN = "isNaN"; var st_isFinite = "isFinite"; do{ with(myObj){ st_p1 = p1; st_p2 = p2; st_p3 = p3; st_parseInt = parseInt; st_NaN = NaN; st_Infinity = Infinity; st_eval = eval; st_parseFloat = parseFloat; st_isNaN = isNaN; st_isFinite = isFinite; p1 = 'x1'; this.p2 = 'x2'; del = delete p3; var p4 = 'x4'; p5 = 'x5'; var value = 'value'; break; } } while(false); if(!(p1 === 1)){ $ERROR('#1: p1 === 1. Actual: p1 ==='+ p1 ); } if(!(p2 === "x2")){ $ERROR('#2: p2 === "x2". Actual: p2 ==='+ p2 ); } if(!(p3 === 3)){ $ERROR('#3: p3 === 3. Actual: p3 ==='+ p3 ); } if(!(p4 === "x4")){ $ERROR('#4: p4 === "x4". Actual: p4 ==='+ p4 ); } if(!(p5 === "x5")){ $ERROR('#5: p5 === "x5". Actual: p5 ==='+ p5 ); } if(!(myObj.p1 === "x1")){ $ERROR('#6: myObj.p1 === "x1". Actual: myObj.p1 ==='+ myObj.p1 ); } if(!(myObj.p2 === "b")){ $ERROR('#7: myObj.p2 === "b". Actual: myObj.p2 ==='+ myObj.p2 ); } if(!(myObj.p3 === undefined)){ $ERROR('#8: myObj.p3 === undefined. Actual: myObj.p3 ==='+ myObj.p3 ); } <|fim▁hole|> $ERROR('#9: myObj.p4 === undefined. Actual: myObj.p4 ==='+ myObj.p4 ); } if(!(myObj.p5 === undefined)){ $ERROR('#10: myObj.p5 === undefined. Actual: myObj.p5 ==='+ myObj.p5 ); } if(!(st_parseInt !== parseInt)){ $ERROR('#11: myObj.parseInt !== parseInt'); } if(!(st_NaN === "obj_NaN")){ $ERROR('#12: myObj.NaN !== NaN'); } if(!(st_Infinity !== Infinity)){ $ERROR('#13: myObj.Infinity !== Infinity'); } if(!(st_eval !== eval)){ $ERROR('#14: myObj.eval !== eval'); } if(!(st_parseFloat !== parseFloat)){ $ERROR('#15: myObj.parseFloat !== parseFloat'); } if(!(st_isNaN !== isNaN)){ $ERROR('#16: myObj.isNaN !== isNaN'); } if(!(st_isFinite !== isFinite)){ $ERROR('#17: myObj.isFinite !== isFinite'); } if(!(value === undefined)){ $ERROR('#18: value === undefined. Actual: value ==='+ value ); } if(!(myObj.value === "value")){ $ERROR('#19: myObj.value === "value". Actual: myObj.value ==='+ myObj.value ); }<|fim▁end|>
if(!(myObj.p4 === undefined)){
<|file_name|>build_docs.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== r"""Script to generate api_docs for MLMD. The script needs to be run under Python3. The doc generator can be installed with: ``` $> pip3 install git+https://github.com/tensorflow/docs ``` To run from it on the mlmd pip package: ``` python3 ml_metadata/tools/documentation/build_docs.py --output_dir=/tmp/mlmd ``` """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import os from absl import app from absl import flags from tensorflow_docs.api_generator import doc_controls from tensorflow_docs.api_generator import generate_lib from tensorflow_docs.api_generator import public_api import ml_metadata as mlmd from google.protobuf.reflection import GeneratedProtocolMessageType flags.DEFINE_string('output_dir', '/tmp/mlmd_api', 'Where to output the docs') flags.DEFINE_string( 'code_url_prefix', 'https://github.com/google/ml-metadata/tree/master/ml_metadata', 'The url prefix for links to code.') flags.DEFINE_bool('search_hints', True, 'Include metadata search hints in the generated files') flags.DEFINE_string('site_path', 'ml_metadata/api_docs/python', 'Path prefix in the _toc.yaml') FLAGS = flags.FLAGS def ignore_proto_method(path, parent, children): """Remove all the proto inherited methods. Args: path: A tuple of name parts forming the attribute-lookup path to this object. For `tf.keras.layers.Dense` path is: ("tf","keras","layers","Dense") parent: The parent object. children: A list of (name, value) pairs. The attributes of the patent. Returns: A filtered list of children `(name, value)` pairs. With all proto methods removed. """ del path new_children = [] if not isinstance(parent, GeneratedProtocolMessageType): return children new_children = [] for (name, obj) in children: if 'function' in str(obj.__class__): continue new_children.append((name, obj)) return new_children def ignore_attrs_method(path, parent, children): """Remove auto generated attrs methods. Args: path: A tuple of name parts forming the attribute-lookup path to this object. For `tf.keras.layers.Dense` path is: ("tf","keras","layers","Dense") parent: The parent object. children: A list of (name, value) pairs. The attributes of the patent. Returns: A filtered list of children `(name, value)` pairs. 
With all attrs auto generated methods removed (e.g., __eq__, __ge__, __gt__) """ del path del parent new_children = [] for (name, obj) in children: if name in ['__eq__', '__ge__', '__gt__', '__le__', '__lt__', '__ne__']: continue new_children.append((name, obj)) return new_children def main(args): if args[1:]: raise ValueError('Unrecognized command line args', args[1:]) suppress_docs_for = [] for name in ['version', 'goo'+'gle', 'metadata_store', 'pywrap']: submodule = getattr(mlmd, name, None) if submodule is not None: suppress_docs_for.append(submodule) for obj in suppress_docs_for: doc_controls.do_not_generate_docs(obj) <|fim▁hole|> root_title='ML Metadata', py_modules=[('mlmd', mlmd)], base_dir=os.path.dirname(mlmd.__file__), code_url_prefix=FLAGS.code_url_prefix, search_hints=FLAGS.search_hints, site_path=FLAGS.site_path, private_map={}, callbacks=[ # This filters out objects not defined in the current module or its # sub-modules. public_api.local_definitions_filter, ignore_proto_method, ignore_attrs_method ]) doc_generator.build(output_dir=FLAGS.output_dir) if __name__ == '__main__': app.run(main)<|fim▁end|>
doc_generator = generate_lib.DocGenerator(
<|file_name|>download_floodlight_tag.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This example downloads activity tags for a given floodlight activity."""

import argparse
import sys

from apiclient import sample_tools
from oauth2client import client

# Declare command-line flags.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument(<|fim▁hole|>
    help='The ID of the profile to download tags for')
argparser.add_argument(
    'activity_id', type=int,
    help='The ID of the floodlight activity to download tags for')


def main(argv):
  # Authenticate and construct service.
  service, flags = sample_tools.init(
      argv, 'dfareporting', 'v2.0', __doc__, __file__, parents=[argparser],
      scope=['https://www.googleapis.com/auth/dfareporting',
             'https://www.googleapis.com/auth/dfatrafficking'])

  profile_id = flags.profile_id
  activity_id = flags.activity_id

  try:
    # Construct the request.
    request = service.floodlightActivities().generatetag(
        profileId=profile_id, floodlightActivityId=activity_id)

    # Execute request and print response.
    response = request.execute()

    print response['floodlightActivityTag']
  except client.AccessTokenRefreshError:
    print ('The credentials have been revoked or expired, please re-run the '
           'application to re-authorize')


if __name__ == '__main__':
  main(sys.argv)<|fim▁end|>
'profile_id', type=int,
<|file_name|>constraints3.rs<|end_file_name|><|fim▁begin|>extern crate nalgebra as na; use na::{Point3, RealField, Vector3}; use ncollide3d::shape::{Cuboid, ShapeHandle}; use nphysics3d::force_generator::DefaultForceGeneratorSet; use nphysics3d::joint::DefaultJointConstraintSet; use nphysics3d::joint::{ BallConstraint, PinSlotConstraint, PlanarConstraint, PrismaticConstraint, RectangularConstraint, RevoluteConstraint, UniversalConstraint, }; use nphysics3d::object::{ BodyPartHandle, ColliderDesc, DefaultBodySet, DefaultColliderSet, Ground, RigidBodyDesc, }; use nphysics3d::world::{DefaultGeometricalWorld, DefaultMechanicalWorld}; use nphysics_testbed3d::Testbed; use std::f64::consts::{FRAC_PI_2, PI}; /* * NOTE: The `r` macro is only here to convert from f64 to the `N` scalar type. * This simplifies experimentation with various scalar types (f32, fixed-point numbers, etc.) */ pub fn init_world<N: RealField>(testbed: &mut Testbed<N>) { /* * World */ let mechanical_world = DefaultMechanicalWorld::new(Vector3::new(r!(0.0), r!(-9.81), r!(0.0))); let geometrical_world = DefaultGeometricalWorld::new(); let mut bodies = DefaultBodySet::new(); let mut colliders = DefaultColliderSet::new(); let mut joint_constraints = DefaultJointConstraintSet::new(); let force_generators = DefaultForceGeneratorSet::new(); /* * Ground */ let ground_thickness = r!(0.2); let ground_shape = ShapeHandle::new(Cuboid::new(Vector3::new( r!(3.0), ground_thickness, r!(10.0), ))); let ground_handle = bodies.insert(Ground::new()); let co = ColliderDesc::new(ground_shape) .translation(Vector3::y() * (-ground_thickness - r!(5.0))) .build(BodyPartHandle(ground_handle, 0)); colliders.insert(co); /* * Geometries that will be re-used for several multibody links.. */ let rad = r!(0.2); let cuboid = ShapeHandle::new(Cuboid::new(Vector3::repeat(rad))); let collider_desc = ColliderDesc::new(cuboid.clone()).density(r!(1.0)); /* * Revolute joints. */ let num = 6; let mut parent = BodyPartHandle(ground_handle, 0); let first_anchor = Point3::new(r!(0.0), r!(5.0), r!(11.0)); let mut pos = first_anchor.coords; for i in 0usize..num { let body_anchor = Point3::new(r!(0.0), r!(0.0), r!(1.0)) * (rad * r!(3.0) + r!(0.2)); let parent_anchor = if i == 0 { first_anchor } else { Point3::origin() }; pos -= body_anchor.coords; let rb = RigidBodyDesc::new().translation(pos).build(); let rb_handle = bodies.insert(rb); let co = collider_desc.build(BodyPartHandle(rb_handle, 0)); colliders.insert(co); let mut constraint = RevoluteConstraint::new( parent, BodyPartHandle(rb_handle, 0), parent_anchor, Vector3::x_axis(), body_anchor, Vector3::x_axis(), ); constraint.set_break_force(r!(40.0)); joint_constraints.insert(constraint); parent = BodyPartHandle(rb_handle, 0); } /* * Prismatic constraint. 
*/ let first_anchor = Point3::new(r!(0.0), r!(5.0), r!(4.0)); let mut pos = first_anchor.coords; parent = BodyPartHandle(ground_handle, 0); for i in 0usize..3 { let mut body_anchor = Point3::origin(); let mut parent_anchor = Point3::origin(); if i == 0 { parent_anchor = first_anchor; } else { body_anchor = Point3::new(r!(0.0), r!(0.0), r!(-1.0)) * (rad * r!(3.0)); } pos -= body_anchor.coords; let rb = RigidBodyDesc::new().translation(pos).build(); let rb_handle = bodies.insert(rb); let co = collider_desc.build(BodyPartHandle(rb_handle, 0)); colliders.insert(co); let mut constraint = PrismaticConstraint::new( parent, BodyPartHandle(rb_handle, 0), parent_anchor, Vector3::y_axis(), body_anchor, ); constraint.set_break_force(r!(40.0)); constraint.enable_min_offset(-rad * r!(2.0)); joint_constraints.insert(constraint); parent = BodyPartHandle(rb_handle, 0); } /* * Ball constraint. */ let first_anchor = Point3::new(r!(0.0), r!(5.0), r!(0.0)); let mut pos = first_anchor.coords; parent = BodyPartHandle(ground_handle, 0); for i in 0usize..num { let angle = r!(i as f64) * r!(2.0) * r!(PI) / r!(num as f64); let mut body_anchor = Point3::origin(); let mut parent_anchor = Point3::origin(); if i == 0 { parent_anchor = first_anchor; } else { body_anchor = Point3::new(angle.cos(), r!(0.3), angle.sin()) * (rad * r!(5.0)); } pos -= body_anchor.coords; let rb = RigidBodyDesc::new().translation(pos).build(); let rb_handle = bodies.insert(rb); let co = collider_desc.build(BodyPartHandle(rb_handle, 0)); colliders.insert(co); let mut constraint = BallConstraint::new( parent, BodyPartHandle(rb_handle, 0), parent_anchor, body_anchor, ); constraint.set_break_force(r!(40.0)); joint_constraints.insert(constraint); parent = BodyPartHandle(rb_handle, 0); } /* * Universal constraint. */ let parent_pos = Vector3::new(r!(0.0), r!(5.0), r!(-5.0)); let child_pos = Vector3::new(r!(0.0), r!(5.0), r!(-6.0)); let co = ColliderDesc::new(cuboid) .translation(parent_pos) .build(BodyPartHandle(ground_handle, 0)); colliders.insert(co); let rb = RigidBodyDesc::new().translation(child_pos).build(); let rb_handle = bodies.insert(rb); let co = collider_desc.build(BodyPartHandle(rb_handle, 0)); colliders.insert(co); let mut constraint = UniversalConstraint::new( BodyPartHandle(ground_handle, 0), BodyPartHandle(rb_handle, 0), Point3::from(parent_pos), Vector3::x_axis(), Point3::new(r!(0.0), r!(0.0), r!(1.0)), Vector3::z_axis(), r!(FRAC_PI_2), ); constraint.set_break_force(r!(40.0)); joint_constraints.insert(constraint); /* * Planar constraint. */ let num = 5; let shift = Vector3::new(r!(0.0), r!(-2.0), r!(5.0)); let width = r!(5.0) * rad * r!(4.0); for i in 0..num { for j in 0..num { let mut z = r!(i as f64) * rad * r!(4.0) - width / r!(2.0); let y = r!(j as f64) * rad * r!(4.0) - width / r!(2.0); if j % 2 == 0 { z += rad * r!(2.0); } let rb = RigidBodyDesc::new() .translation(shift + Vector3::new(r!(0.0), y, z)) .build(); let rb_handle = bodies.insert(rb); let co = collider_desc.build(BodyPartHandle(rb_handle, 0)); colliders.insert(co); let mut constraint = PlanarConstraint::new( BodyPartHandle(ground_handle, 0), BodyPartHandle(rb_handle, 0), Point3::origin(), Vector3::x_axis(), Point3::origin(), Vector3::x_axis(), ); constraint.set_break_force(r!(40.0)); joint_constraints.insert(constraint); } } /* * Rectangular constraint. 
*/ let shift = Vector3::new(r!(0.0), r!(-2.0), r!(0.0)); let width = r!(5.0) * rad * r!(4.0); for i in 0..5 { for j in 0..5 { let mut z = r!(i as f64) * rad * r!(4.0) - width / r!(2.0); let y = r!(j as f64) * rad * r!(4.0) - width / r!(2.0); if j % 2 == 0 { z += rad * r!(2.0);<|fim▁hole|> .translation(shift + Vector3::new(r!(0.0), y, z)) .build(); let rb_handle = bodies.insert(rb); let co = collider_desc.build(BodyPartHandle(rb_handle, 0)); colliders.insert(co); let mut constraint = RectangularConstraint::new( BodyPartHandle(ground_handle, 0), BodyPartHandle(rb_handle, 0), Point3::origin(), Vector3::x_axis(), Point3::origin(), ); constraint.set_break_force(r!(40.0)); joint_constraints.insert(constraint); } } /* * Pin-slot constraint. */ let pin_rb = RigidBodyDesc::new().build(); let pin_handle = bodies.insert(pin_rb); let cuboid = ShapeHandle::new(Cuboid::new(Vector3::new(rad * r!(5.0), rad, rad * r!(5.0)))); let co = ColliderDesc::new(cuboid) .density(r!(1.0)) .build(BodyPartHandle(pin_handle, 0)); colliders.insert(co); let mut constraint = PinSlotConstraint::new( BodyPartHandle(ground_handle, 0), BodyPartHandle(pin_handle, 0), Point3::origin(), Vector3::y_axis(), Vector3::x_axis(), Point3::origin(), Vector3::x_axis(), ); constraint.set_break_force(r!(40.0)); joint_constraints.insert(constraint); /* * Set up the testbed. */ testbed.set_ground_handle(Some(ground_handle)); testbed.set_world( mechanical_world, geometrical_world, bodies, colliders, joint_constraints, force_generators, ); testbed.look_at(Point3::new(30.0, -2.0, 0.0), Point3::new(0.0, -2.0, 0.0)); } fn main() { let testbed = Testbed::<f32>::from_builders(0, vec![("Constraints", init_world)]); testbed.run() }<|fim▁end|>
} let rb = RigidBodyDesc::new()
<|file_name|>safeEval.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3

# Copyright 2016 - 2021 Bas van Meerten and Wouter Franssen

# This file is part of ssNake.
#
# ssNake is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ssNake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ssNake. If not, see <http://www.gnu.org/licenses/>.

import re
import numpy as np
import scipy.special
import hypercomplex as hc

<|fim▁hole|>
    Note that this method is still not acceptable to process strings from untrusted sources.

    Parameters
    ----------
    inp : str
        String to evaluate.
    length : int or float, optional
        The variable length will be set to this value.
        By default the variable length is not set.
    Type : {'All', 'FI', 'C'}, optional
        Type of expected output. 'All' will return all types,
        'FI' will return a float or int, and 'C' will return a complex number.
        By default Type is set to 'All'
    x : array_like, optional
        The variable x is set to this variable,
        By default the variable x is not used.

    Returns
    -------
    Object
        The result of the evaluated string.
    """
    env = vars(np).copy()
    env.update(vars(hc).copy())
    env.update(vars(scipy.special).copy())
    env.update(vars(scipy.integrate).copy())
    env["locals"] = None
    env["globals"] = None
    env["__name__"] = None
    env["__file__"] = None
    env["__builtins__"] = {'None': None, 'False': False, 'True': True}  # None
    env["slice"] = slice
    if length is not None:
        env["length"] = length
    if x is not None:
        env["x"] = x
    inp = re.sub('([0-9]+)[kK]', '\g<1>*1024', str(inp))
    try:
        val = eval(inp, env)
        if isinstance(val, str):
            return None
        if Type == 'All':
            return val
        if Type == 'FI':  # single float/int type
            if isinstance(val, (float, int)) and not np.isnan(val) and not np.isinf(val):
                return val
            return None
        if Type == 'C':  # single complex number
            if isinstance(val, (float, int, complex)) and not np.isnan(val) and not np.isinf(val):
                return val
            return None
    except Exception:
        return None<|fim▁end|>
def safeEval(inp, length=None, Type='All', x=None):
    """
    Creates a more restricted eval environment.
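A short usage sketch of safeEval as defined above. The inputs are made up for illustration and assume numpy is importable as in the module header.

import numpy as np

print(safeEval("2k"))                            # 2048: the k/K suffix is rewritten to *1024
print(safeEval("sqrt(2) * length", length=16))   # ~22.63: numpy names and `length` are in the eval environment
print(safeEval("exp(-x)", x=np.linspace(0, 1, 5), Type='FI'))  # None: an array is not a single float/int
print(safeEval("__import__('os')"))              # None: builtins are stripped, so the lookup fails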
<|file_name|>ParserTrigger.java<|end_file_name|><|fim▁begin|>package org.nnsoft.shs.core.http.parse;

/*
 * Copyright (c) 2012 Simone Tripodi ([email protected])
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

import org.nnsoft.shs.core.http.MutableRequest;
import org.nnsoft.shs.core.http.RequestParseException;

/**
 * ParserTrigger instances are invoked depending on the {@link ParserStatus}.
 */
interface ParserTrigger {<|fim▁hole|>
     * @param status the current parser status.
     * @param token the consumed token.
     * @param request the request that the parser is currently building
     * @throws RequestParseException if any syntax error occurs
     */
    ParserStatus onToken( ParserStatus status, String token, MutableRequest request ) throws RequestParseException;

}<|fim▁end|>
/**
     * Performs an parse action on the input token, adding data to the request, depending on the parser status.
     *
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Test suite for django-staticshard.
"""
import uuid

from urlparse import urlparse

from django.conf import settings
from django.test import TestCase
from django.core.exceptions import ImproperlyConfigured

from ..settings import STATICSHARD_HOSTS
from ..utils import get_absolute_url


class StaticShardTests(TestCase):
    """Test case for django-staticshard."""
    resources = ['img/%s.jpg' % uuid.uuid4() for i in range(0, 20)]

    <|fim▁hole|>
        """
        Test the absolute_url method.
        """
        for resource in self.resources:
            urls = []
            for i in range(0, 5):
                url = get_absolute_url(resource)
                self.assertNotIn(settings.STATIC_URL, url)
                parts = urlparse(url)
                self.assertIn(parts.netloc, STATICSHARD_HOSTS)
                urls.append(url)
            self.assertEqual(urls[0], urls[1])
            self.assertEqual(urls[0], urls[2])
            self.assertEqual(urls[0], urls[3])
            self.assertEqual(urls[0], urls[4])<|fim▁end|>
def test_absolute_url(self):
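The test above only pins down the observable contract of get_absolute_url: the host must come from STATICSHARD_HOSTS, settings.STATIC_URL must not appear in the result, and repeated calls for the same resource must return the same URL. The real implementation is not shown in this sample, so the following is only an assumed sketch of the kind of deterministic, hash-based sharding that would satisfy those assertions; the host names and URL scheme are hypothetical.

import hashlib

STATICSHARD_HOSTS = ['static1.example.com', 'static2.example.com']  # hypothetical hosts

def get_absolute_url(resource):
    # Hash the resource path so the same path always maps to the same host.
    digest = hashlib.md5(resource.encode('utf-8')).hexdigest()
    host = STATICSHARD_HOSTS[int(digest, 16) % len(STATICSHARD_HOSTS)]
    return '//%s/%s' % (host, resource.lstrip('/'))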
<|file_name|>mspimpl.go<|end_file_name|><|fim▁begin|>/* Copyright IBM Corp. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package msp import ( "bytes" "crypto/x509" "crypto/x509/pkix" "encoding/hex" "encoding/pem" "fmt" "github.com/golang/protobuf/proto" "github.com/hyperledger/fabric/bccsp" "github.com/hyperledger/fabric/bccsp/factory" "github.com/hyperledger/fabric/bccsp/signer" m "github.com/hyperledger/fabric/protos/msp" "github.com/pkg/errors" ) // mspSetupFuncType is the prototype of the setup function type mspSetupFuncType func(config *m.FabricMSPConfig) error // validateIdentityOUsFuncType is the prototype of the function to validate identity's OUs type validateIdentityOUsFuncType func(id *identity) error // This is an instantiation of an MSP that // uses BCCSP for its cryptographic primitives. type bccspmsp struct { // version specifies the behaviour of this msp version MSPVersion // The following function pointers are used to change the behaviour // of this MSP depending on its version. // internalSetupFunc is the pointer to the setup function internalSetupFunc mspSetupFuncType // internalValidateIdentityOusFunc is the pointer to the function to validate identity's OUs internalValidateIdentityOusFunc validateIdentityOUsFuncType // list of CA certs we trust rootCerts []Identity // list of intermediate certs we trust intermediateCerts []Identity // list of CA TLS certs we trust tlsRootCerts [][]byte // list of intermediate TLS certs we trust tlsIntermediateCerts [][]byte // certificationTreeInternalNodesMap whose keys correspond to the raw material // (DER representation) of a certificate casted to a string, and whose values // are boolean. True means that the certificate is an internal node of the certification tree. // False means that the certificate corresponds to a leaf of the certification tree. certificationTreeInternalNodesMap map[string]bool // list of signing identities signer SigningIdentity // list of admin identities admins []Identity // the crypto provider bccsp bccsp.BCCSP // the provider identifier for this MSP name string // verification options for MSP members opts *x509.VerifyOptions // list of certificate revocation lists CRL []*pkix.CertificateList // list of OUs ouIdentifiers map[string][][]byte // cryptoConfig contains cryptoConfig *m.FabricCryptoConfig // NodeOUs configuration ouEnforcement bool // These are the OUIdentifiers of the clients, peers and orderers. // They are used to tell apart these entities clientOU, peerOU, ordererOU *OUIdentifier } // newBccspMsp returns an MSP instance backed up by a BCCSP // crypto provider. 
It handles x.509 certificates and can // generate identities and signing identities backed by // certificates and keypairs func newBccspMsp(version MSPVersion) (MSP, error) { mspLogger.Debugf("Creating BCCSP-based MSP instance") bccsp := factory.GetDefault() theMsp := &bccspmsp{} theMsp.version = version theMsp.bccsp = bccsp switch version { case MSPv1_0: theMsp.internalSetupFunc = theMsp.setupV1 theMsp.internalValidateIdentityOusFunc = theMsp.validateIdentityOUsV1 case MSPv1_1: theMsp.internalSetupFunc = theMsp.setupV11 theMsp.internalValidateIdentityOusFunc = theMsp.validateIdentityOUsV11 default: return nil, errors.Errorf("Invalid MSP version [%v]", version) } return theMsp, nil } func (msp *bccspmsp) getCertFromPem(idBytes []byte) (*x509.Certificate, error) { if idBytes == nil { return nil, errors.New("getCertFromPem error: nil idBytes") } // Decode the pem bytes pemCert, _ := pem.Decode(idBytes) if pemCert == nil { return nil, errors.Errorf("getCertFromPem error: could not decode pem bytes [%v]", idBytes) } // get a cert var cert *x509.Certificate cert, err := x509.ParseCertificate(pemCert.Bytes) if err != nil { return nil, errors.Wrap(err, "getCertFromPem error: failed to parse x509 cert") } return cert, nil } func (msp *bccspmsp) getIdentityFromConf(idBytes []byte) (Identity, bccsp.Key, error) { // get a cert cert, err := msp.getCertFromPem(idBytes) if err != nil { return nil, nil, err } // get the public key in the right format certPubK, err := msp.bccsp.KeyImport(cert, &bccsp.X509PublicKeyImportOpts{Temporary: true}) mspId, err := newIdentity(cert, certPubK, msp) if err != nil { return nil, nil, err } return mspId, certPubK, nil } func (msp *bccspmsp) getSigningIdentityFromConf(sidInfo *m.SigningIdentityInfo) (SigningIdentity, error) { if sidInfo == nil { return nil, errors.New("getIdentityFromBytes error: nil sidInfo") } // Extract the public part of the identity idPub, pubKey, err := msp.getIdentityFromConf(sidInfo.PublicSigner) if err != nil { return nil, err } // Find the matching private key in the BCCSP keystore privKey, err := msp.bccsp.GetKey(pubKey.SKI()) // Less Secure: Attempt to import Private Key from KeyInfo, if BCCSP was not able to find the key if err != nil { mspLogger.Debugf("Could not find SKI [%s], trying KeyMaterial field: %+v\n", hex.EncodeToString(pubKey.SKI()), err) if sidInfo.PrivateSigner == nil || sidInfo.PrivateSigner.KeyMaterial == nil { return nil, errors.New("KeyMaterial not found in SigningIdentityInfo") } pemKey, _ := pem.Decode(sidInfo.PrivateSigner.KeyMaterial) privKey, err = msp.bccsp.KeyImport(pemKey.Bytes, &bccsp.ECDSAPrivateKeyImportOpts{Temporary: true}) if err != nil { return nil, errors.WithMessage(err, "getIdentityFromBytes error: Failed to import EC private key") } } // get the peer signer peerSigner, err := signer.New(msp.bccsp, privKey) if err != nil { return nil, errors.WithMessage(err, "getIdentityFromBytes error: Failed initializing bccspCryptoSigner") } return newSigningIdentity(idPub.(*identity).cert, idPub.(*identity).pk, peerSigner, msp) } // Setup sets up the internal data structures // for this MSP, given an MSPConfig ref; it // returns nil in case of success or an error otherwise func (msp *bccspmsp) Setup(conf1 *m.MSPConfig) error { if conf1 == nil { return errors.New("Setup error: nil conf reference") } // given that it's an msp of type fabric, extract the MSPConfig instance conf := &m.FabricMSPConfig{} err := proto.Unmarshal(conf1.Config, conf) if err != nil { return errors.Wrap(err, "failed unmarshalling fabric msp 
config") } // set the name for this msp msp.name = conf.Name mspLogger.Debugf("Setting up MSP instance %s", msp.name) // setup return msp.internalSetupFunc(conf) } // GetType returns the type for this MSP func (msp *bccspmsp) GetType() ProviderType { return FABRIC } // GetIdentifier returns the MSP identifier for this instance func (msp *bccspmsp) GetIdentifier() (string, error) { return msp.name, nil } // GetTLSRootCerts returns the root certificates for this MSP func (msp *bccspmsp) GetTLSRootCerts() [][]byte { return msp.tlsRootCerts } // GetTLSIntermediateCerts returns the intermediate root certificates for this MSP func (msp *bccspmsp) GetTLSIntermediateCerts() [][]byte { return msp.tlsIntermediateCerts } // GetDefaultSigningIdentity returns the // default signing identity for this MSP (if any) func (msp *bccspmsp) GetDefaultSigningIdentity() (SigningIdentity, error) { mspLogger.Debugf("Obtaining default signing identity") if msp.signer == nil { return nil, errors.New("this MSP does not possess a valid default signing identity") } return msp.signer, nil } // GetSigningIdentity returns a specific signing // identity identified by the supplied identifier func (msp *bccspmsp) GetSigningIdentity(identifier *IdentityIdentifier) (SigningIdentity, error) { // TODO return nil, errors.Errorf("no signing identity for %#v", identifier) } // Validate attempts to determine whether // the supplied identity is valid according // to this MSP's roots of trust; it returns // nil in case the identity is valid or an // error otherwise func (msp *bccspmsp) Validate(id Identity) error { mspLogger.Debugf("MSP %s validating identity", msp.name) switch id := id.(type) { // If this identity is of this specific type, // this is how I can validate it given the // root of trust this MSP has case *identity: return msp.validateIdentity(id) default: return errors.New("identity type not recognized") } } // DeserializeIdentity returns an Identity given the byte-level // representation of a SerializedIdentity struct func (msp *bccspmsp) DeserializeIdentity(serializedID []byte) (Identity, error) { mspLogger.Infof("Obtaining identity") // We first deserialize to a SerializedIdentity to get the MSP ID sId := &m.SerializedIdentity{} err := proto.Unmarshal(serializedID, sId) if err != nil { return nil, errors.Wrap(err, "could not deserialize a SerializedIdentity") } if sId.Mspid != msp.name { return nil, errors.Errorf("expected MSP ID %s, received %s", msp.name, sId.Mspid) } return msp.deserializeIdentityInternal(sId.IdBytes) } // deserializeIdentityInternal returns an identity given its byte-level representation func (msp *bccspmsp) deserializeIdentityInternal(serializedIdentity []byte) (Identity, error) { // This MSP will always deserialize certs this way bl, _ := pem.Decode(serializedIdentity) if bl == nil { return nil, errors.New("could not decode the PEM structure") } cert, err := x509.ParseCertificate(bl.Bytes) if err != nil { return nil, errors.Wrap(err, "parseCertificate failed") } // Now we have the certificate; make sure that its fields // (e.g. the Issuer.OU or the Subject.OU) match with the // MSP id that this MSP has; otherwise it might be an attack // TODO! 
// We can't do it yet because there is no standardized way // (yet) to encode the MSP ID into the x.509 body of a cert pub, err := msp.bccsp.KeyImport(cert, &bccsp.X509PublicKeyImportOpts{Temporary: true}) if err != nil { return nil, errors.WithMessage(err, "failed to import certificate's public key") } return newIdentity(cert, pub, msp) } // SatisfiesPrincipal returns null if the identity matches the principal or an error otherwise func (msp *bccspmsp) SatisfiesPrincipal(id Identity, principal *m.MSPPrincipal) error { switch principal.PrincipalClassification { // in this case, we have to check whether the // identity has a role in the msp - member or admin case m.MSPPrincipal_ROLE: // Principal contains the msp role mspRole := &m.MSPRole{} err := proto.Unmarshal(principal.Principal, mspRole) if err != nil { return errors.Wrap(err, "could not unmarshal MSPRole from principal") } // at first, we check whether the MSP // identifier is the same as that of the identity if mspRole.MspIdentifier != msp.name { return errors.Errorf("the identity is a member of a different MSP (expected %s, got %s)", mspRole.MspIdentifier, id.GetMSPIdentifier()) } // now we validate the different msp roles switch mspRole.Role { case m.MSPRole_MEMBER: // in the case of member, we simply check // whether this identity is valid for the MSP mspLogger.Debugf("Checking if identity satisfies MEMBER role for %s", msp.name) return msp.Validate(id) case m.MSPRole_ADMIN: mspLogger.Debugf("Checking if identity satisfies ADMIN role for %s", msp.name) // in the case of admin, we check that the // id is exactly one of our admins for _, admincert := range msp.admins { if bytes.Equal(id.(*identity).cert.Raw, admincert.(*identity).cert.Raw) { // we do not need to check whether the admin is a valid identity // according to this MSP, since we already check this at Setup time // if there is a match, we can just return return nil } } return errors.New("This identity is not an admin") default: return errors.Errorf("invalid MSP role type %d", int32(mspRole.Role)) } case m.MSPPrincipal_IDENTITY: // in this case we have to deserialize the principal's identity // and compare it byte-by-byte with our cert principalId, err := msp.DeserializeIdentity(principal.Principal) if err != nil { return errors.WithMessage(err, "invalid identity principal, not a certificate") } if bytes.Equal(id.(*identity).cert.Raw, principalId.(*identity).cert.Raw) { return principalId.Validate() } return errors.New("The identities do not match") case m.MSPPrincipal_ORGANIZATION_UNIT: // Principal contains the OrganizationUnit OU := &m.OrganizationUnit{} err := proto.Unmarshal(principal.Principal, OU) if err != nil { return errors.Wrap(err, "could not unmarshal OrganizationUnit from principal") } // at first, we check whether the MSP // identifier is the same as that of the identity if OU.MspIdentifier != msp.name { return errors.Errorf("the identity is a member of a different MSP (expected %s, got %s)", OU.MspIdentifier, id.GetMSPIdentifier()) } // we then check if the identity is valid with this MSP // and fail if it is not err = msp.Validate(id) if err != nil { return err } // now we check whether any of this identity's OUs match the requested one for _, ou := range id.GetOrganizationalUnits() { if ou.OrganizationalUnitIdentifier == OU.OrganizationalUnitIdentifier && bytes.Equal(ou.CertifiersIdentifier, OU.CertifiersIdentifier) { return nil } } // if we are here, no match was found, return an error return errors.New("The identities do not match") default: return 
errors.Errorf("invalid principal type %d", int32(principal.PrincipalClassification)) } } // getCertificationChain returns the certification chain of the passed identity within this msp func (msp *bccspmsp) getCertificationChain(id Identity) ([]*x509.Certificate, error) { mspLogger.Debugf("MSP %s getting certification chain", msp.name) switch id := id.(type) { // If this identity is of this specific type, // this is how I can validate it given the // root of trust this MSP has case *identity: return msp.getCertificationChainForBCCSPIdentity(id) default: return nil, errors.New("identity type not recognized") } } // getCertificationChainForBCCSPIdentity returns the certification chain of the passed bccsp identity within this msp func (msp *bccspmsp) getCertificationChainForBCCSPIdentity(id *identity) ([]*x509.Certificate, error) { if id == nil { return nil, errors.New("Invalid bccsp identity. Must be different from nil.") } // we expect to have a valid VerifyOptions instance if msp.opts == nil { return nil, errors.New("Invalid msp instance") } // CAs cannot be directly used as identities.. if id.cert.IsCA { return nil, errors.New("A CA certificate cannot be used directly by this MSP") } return msp.getValidationChain(id.cert, false) } func (msp *bccspmsp) getUniqueValidationChain(cert *x509.Certificate, opts x509.VerifyOptions) ([]*x509.Certificate, error) { // ask golang to validate the cert for us based on the options that we've built at setup time if msp.opts == nil { return nil, errors.New("the supplied identity has no verify options") } validationChains, err := cert.Verify(opts) if err != nil { return nil, errors.WithMessage(err, "the supplied identity is not valid") } // we only support a single validation chain; // if there's more than one then there might // be unclarity about who owns the identity if len(validationChains) != 1 { return nil, errors.Errorf("this MSP only supports a single validation chain, got %d", len(validationChains)) } return validationChains[0], nil } func (msp *bccspmsp) getValidationChain(cert *x509.Certificate, isIntermediateChain bool) ([]*x509.Certificate, error) { validationChain, err := msp.getUniqueValidationChain(cert, msp.getValidityOptsForCert(cert)) if err != nil { return nil, errors.WithMessage(err, "failed getting validation chain") } // we expect a chain of length at least 2 if len(validationChain) < 2 { return nil, errors.Errorf("expected a chain of length at least 2, got %d", len(validationChain)) } // check that the parent is a leaf of the certification tree // if validating an intermediate chain, the first certificate will the parent parentPosition := 1 if isIntermediateChain { parentPosition = 0 } if msp.certificationTreeInternalNodesMap[string(validationChain[parentPosition].Raw)] { return nil, errors.Errorf("invalid validation chain. Parent certificate should be a leaf of the certification tree [%v]", cert.Raw) } return validationChain, nil } // getCertificationChainIdentifier returns the certification chain identifier of the passed identity within this msp. // The identifier is computes as the SHA256 of the concatenation of the certificates in the chain. func (msp *bccspmsp) getCertificationChainIdentifier(id Identity) ([]byte, error) { chain, err := msp.getCertificationChain(id) if err != nil { return nil, errors.WithMessage(err, fmt.Sprintf("failed getting certification chain for [%v]", id)) } // chain[0] is the certificate representing the identity. 
// It will be discarded return msp.getCertificationChainIdentifierFromChain(chain[1:]) } func (msp *bccspmsp) getCertificationChainIdentifierFromChain(chain []*x509.Certificate) ([]byte, error) { // Hash the chain // Use the hash of the identity's certificate as id in the IdentityIdentifier hashOpt, err := bccsp.GetHashOpt(msp.cryptoConfig.IdentityIdentifierHashFunction) if err != nil { return nil, errors.WithMessage(err, "failed getting hash function options") } hf, err := msp.bccsp.GetHash(hashOpt) if err != nil { return nil, errors.WithMessage(err, "failed getting hash function when computing certification chain identifier") } for i := 0; i < len(chain); i++ { hf.Write(chain[i].Raw) } return hf.Sum(nil), nil } // sanitizeCert ensures that x509 certificates signed using ECDSA // do have signatures in Low-S. If this is not the case, the certificate<|fim▁hole|>func (msp *bccspmsp) sanitizeCert(cert *x509.Certificate) (*x509.Certificate, error) { if isECDSASignedCert(cert) { // Lookup for a parent certificate to perform the sanitization var parentCert *x509.Certificate if cert.IsCA { // at this point, cert might be a root CA certificate // or an intermediate CA certificate chain, err := msp.getUniqueValidationChain(cert, msp.getValidityOptsForCert(cert)) if err != nil { return nil, err } if len(chain) == 1 { // cert is a root CA certificate parentCert = cert } else { // cert is an intermediate CA certificate parentCert = chain[1] } } else { chain, err := msp.getUniqueValidationChain(cert, msp.getValidityOptsForCert(cert)) if err != nil { return nil, err } parentCert = chain[1] } // Sanitize var err error cert, err = sanitizeECDSASignedCert(cert, parentCert) if err != nil { return nil, err } } return cert, nil } // IsWellFormed checks if the given identity can be deserialized into its provider-specific form. // In this MSP implementation, well formed means that the PEM has a Type which is either // the string 'CERTIFICATE' or the Type is missing altogether. func (msp *bccspmsp) IsWellFormed(identity *m.SerializedIdentity) error { bl, _ := pem.Decode(identity.IdBytes) if bl == nil { return errors.New("PEM decoding resulted in an empty block") } // Important: This method looks very similar to getCertFromPem(idBytes []byte) (*x509.Certificate, error) // But we: // 1) Must ensure PEM block is of type CERTIFICATE or is empty // 2) Must not replace getCertFromPem with this method otherwise we will introduce // a change in validation logic which will result in a chain fork. if bl.Type != "CERTIFICATE" && bl.Type != "" { return errors.Errorf("pem type is %s, should be 'CERTIFICATE' or missing", bl.Type) } _, err := x509.ParseCertificate(bl.Bytes) return err }<|fim▁end|>
// is regenerated to have a Low-S signature.
<|file_name|>David Yuen Shades of Blue.py<|end_file_name|><|fim▁begin|>#David Yuen #September 11, 2017 #This program draws different shades of blue.<|fim▁hole|>bob.shape("turtle") bob.backward(100) for i in range(0,255,10): bob.forward(10) bob.pensize(i) bob.color(0,0,i)<|fim▁end|>
import turtle
turtle.colormode(255)
bob = turtle.Turtle()
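Read as one piece, the prompt and completion above form a single short turtle script. A runnable reconstruction for reference (a sketch only — the loop-body indentation is assumed, since the flattened row does not preserve it):

import turtle

turtle.colormode(255)              # accept RGB components in the 0-255 range
bob = turtle.Turtle()
bob.shape("turtle")
bob.backward(100)

for i in range(0, 255, 10):        # step the blue channel from 0 toward 255
    bob.forward(10)
    bob.pensize(i)                 # pen thickens as the colour brightens
    bob.color(0, 0, i)             # red and green stay 0, giving shades of blue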
<|file_name|>syntax-extension-fmt.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern mod std; fn test(actual: ~str, expected: ~str) { debug!(actual.clone()); debug!(expected.clone()); assert!((actual == expected)); } pub fn main() { test(fmt!("hello %d friends and %s things", 10, ~"formatted"), ~"hello 10 friends and formatted things"); test(fmt!("test"), ~"test"); // a quadratic optimization in LLVM (jump-threading) makes this test a // bit slow to compile unless we break it up part1(); part2(); part3(); part4(); part5(); part6(); percent(); more_floats(); } fn part1() { // Simple tests for types test(fmt!("%d", 1), ~"1"); test(fmt!("%i", 2), ~"2"); test(fmt!("%i", -1), ~"-1"); test(fmt!("%u", 10u), ~"10"); test(fmt!("%s", ~"test"), ~"test"); test(fmt!("%b", true), ~"true"); test(fmt!("%b", false), ~"false"); test(fmt!("%c", 'A'), ~"A"); test(fmt!("%x", 0xff_u), ~"ff"); test(fmt!("%X", 0x12ab_u), ~"12AB"); test(fmt!("%o", 10u), ~"12"); test(fmt!("%t", 0b11010101_u), ~"11010101"); test(fmt!("%f", 5.82), ~"5.82"); // 32-bit limits test(fmt!("%i", -2147483648), ~"-2147483648"); test(fmt!("%i", 2147483647), ~"2147483647"); test(fmt!("%u", 4294967295u), ~"4294967295"); test(fmt!("%x", 0xffffffff_u), ~"ffffffff"); test(fmt!("%o", 0xffffffff_u), ~"37777777777"); test(fmt!("%t", 0xffffffff_u), ~"11111111111111111111111111111111"); } fn part2() { // Widths test(fmt!("%1d", 500), ~"500"); test(fmt!("%10d", 500), ~" 500"); test(fmt!("%10d", -500), ~" -500"); test(fmt!("%10u", 500u), ~" 500"); test(fmt!("%10s", ~"test"), ~" test"); test(fmt!("%10b", true), ~" true"); test(fmt!("%10x", 0xff_u), ~" ff"); test(fmt!("%10X", 0xff_u), ~" FF"); test(fmt!("%10o", 10u), ~" 12"); test(fmt!("%10t", 0xff_u), ~" 11111111"); test(fmt!("%10c", 'A'), ~" A"); test(fmt!("%10f", 5.82), ~" 5.82"); // Left justify test(fmt!("%-10d", 500), ~"500 "); test(fmt!("%-10d", -500), ~"-500 "); test(fmt!("%-10u", 500u), ~"500 "); test(fmt!("%-10s", ~"test"), ~"test "); test(fmt!("%-10b", true), ~"true "); test(fmt!("%-10x", 0xff_u), ~"ff "); test(fmt!("%-10X", 0xff_u), ~"FF "); test(fmt!("%-10o", 10u), ~"12 "); test(fmt!("%-10t", 0xff_u), ~"11111111 "); test(fmt!("%-10c", 'A'), ~"A "); test(fmt!("%-10f", 5.82), ~"5.82 "); } fn part3() { // Precision test(fmt!("%.d", 0), ~""); test(fmt!("%.u", 0u), ~""); test(fmt!("%.x", 0u), ~""); test(fmt!("%.t", 0u), ~""); test(fmt!("%.d", 10), ~"10"); test(fmt!("%.d", -10), ~"-10"); test(fmt!("%.u", 10u), ~"10"); test(fmt!("%.s", ~"test"), ~""); test(fmt!("%.x", 127u), ~"7f"); test(fmt!("%.o", 10u), ~"12"); test(fmt!("%.t", 3u), ~"11"); test(fmt!("%.c", 'A'), ~"A"); test(fmt!("%.f", 5.82), ~"6"); test(fmt!("%.0d", 0), ~""); test(fmt!("%.0u", 0u), ~""); test(fmt!("%.0x", 0u), ~""); test(fmt!("%.0t", 0u), ~""); test(fmt!("%.0d", 10), ~"10"); test(fmt!("%.0d", -10), ~"-10"); test(fmt!("%.0u", 10u), ~"10"); test(fmt!("%.0s", ~"test"), ~""); test(fmt!("%.0x", 127u), ~"7f"); test(fmt!("%.0o", 10u), ~"12"); test(fmt!("%.0t", 3u), ~"11"); test(fmt!("%.0c", 'A'), ~"A"); test(fmt!("%.0f", 5.892), ~"6"); test(fmt!("%.1d", 0), ~"0"); test(fmt!("%.1u", 0u), 
~"0"); test(fmt!("%.1x", 0u), ~"0"); test(fmt!("%.1t", 0u), ~"0"); test(fmt!("%.1d", 10), ~"10"); test(fmt!("%.1d", -10), ~"-10"); test(fmt!("%.1u", 10u), ~"10"); test(fmt!("%.1s", ~"test"), ~"t"); test(fmt!("%.1x", 127u), ~"7f"); test(fmt!("%.1o", 10u), ~"12"); test(fmt!("%.1t", 3u), ~"11"); test(fmt!("%.1c", 'A'), ~"A"); test(fmt!("%.1f", 5.82), ~"5.8"); } fn part4() { test(fmt!("%.5d", 0), ~"00000"); test(fmt!("%.5u", 0u), ~"00000"); test(fmt!("%.5x", 0u), ~"00000"); test(fmt!("%.5t", 0u), ~"00000"); test(fmt!("%.5d", 10), ~"00010"); test(fmt!("%.5d", -10), ~"-00010"); test(fmt!("%.5u", 10u), ~"00010"); test(fmt!("%.5s", ~"test"), ~"test"); test(fmt!("%.5x", 127u), ~"0007f"); test(fmt!("%.5o", 10u), ~"00012"); test(fmt!("%.5t", 3u), ~"00011"); test(fmt!("%.5c", 'A'), ~"A"); test(fmt!("%.5f", 5.82), ~"5.82000"); test(fmt!("%.5f", 5.0), ~"5.00000"); test(fmt!("%.100f", 1.1), ~"1.1000000000000000888178419700125232338905334472656250000000000000000000000000000000000000000000000000"); // Bool precision. I'm not sure if it's good or bad to have bool // conversions support precision - it's not standard printf so we // can do whatever. For now I'm making it behave the same as string // conversions. test(fmt!("%.b", true), ~""); test(fmt!("%.0b", true), ~""); test(fmt!("%.1b", true), ~"t"); } fn part5() { // Explicit + sign. Only for signed conversions test(fmt!("%+d", 0), ~"+0"); test(fmt!("%+d", 1), ~"+1"); test(fmt!("%+d", -1), ~"-1"); test(fmt!("%+f", 0.0), ~"+0"); // Leave space for sign test(fmt!("% d", 0), ~" 0"); test(fmt!("% d", 1), ~" 1"); test(fmt!("% d", -1), ~"-1"); test(fmt!("% f", 0.0), ~" 0");<|fim▁hole|> test(fmt!("% +d", 0), ~"+0"); test(fmt!("%+ d", 0), ~"+0"); test(fmt!("% +f", 0.0), ~"+0"); test(fmt!("%+ f", 0.0), ~"+0"); // 0-padding test(fmt!("%05d", 0), ~"00000"); test(fmt!("%05d", 1), ~"00001"); test(fmt!("%05d", -1), ~"-0001"); test(fmt!("%05u", 1u), ~"00001"); test(fmt!("%05x", 127u), ~"0007f"); test(fmt!("%05X", 127u), ~"0007F"); test(fmt!("%05o", 10u), ~"00012"); test(fmt!("%05t", 3u), ~"00011"); test(fmt!("%05f", 5.82), ~"05.82"); // 0-padding a string is undefined but glibc does this: test(fmt!("%05s", ~"test"), ~" test"); test(fmt!("%05c", 'A'), ~" A"); test(fmt!("%05b", true), ~" true"); // Left-justify overrides 0-padding test(fmt!("%-05d", 0), ~"0 "); test(fmt!("%-05d", 1), ~"1 "); test(fmt!("%-05d", -1), ~"-1 "); test(fmt!("%-05u", 1u), ~"1 "); test(fmt!("%-05x", 127u), ~"7f "); test(fmt!("%-05X", 127u), ~"7F "); test(fmt!("%-05o", 10u), ~"12 "); test(fmt!("%-05t", 3u), ~"11 "); test(fmt!("%-05s", ~"test"), ~"test "); test(fmt!("%-05c", 'A'), ~"A "); test(fmt!("%-05b", true), ~"true "); test(fmt!("%-05f", 5.82), ~"5.82 "); } fn part6() { // Precision overrides 0-padding // FIXME #2481: Recent gcc's report some of these as warnings test(fmt!("%06.5d", 0), ~" 00000"); test(fmt!("%06.5u", 0u), ~" 00000"); test(fmt!("%06.5x", 0u), ~" 00000"); test(fmt!("%06.5d", 10), ~" 00010"); test(fmt!("%06.5d", -10), ~"-00010"); test(fmt!("%06.5u", 10u), ~" 00010"); test(fmt!("%06.5s", ~"test"), ~" test"); test(fmt!("%06.5c", 'A'), ~" A"); test(fmt!("%06.5x", 127u), ~" 0007f"); test(fmt!("%06.5X", 127u), ~" 0007F"); test(fmt!("%06.5o", 10u), ~" 00012"); // Precision does not override zero-padding for floats test(fmt!("%08.5f", 5.82), ~"05.82000"); // Signed combinations test(fmt!("% 5d", 1), ~" 1"); test(fmt!("% 5d", -1), ~" -1"); test(fmt!("%+5d", 1), ~" +1"); test(fmt!("%+5d", -1), ~" -1"); test(fmt!("% 05d", 1), ~" 0001"); test(fmt!("% 05d", -1), ~"-0001"); 
test(fmt!("%+05d", 1), ~"+0001"); test(fmt!("%+05d", -1), ~"-0001"); test(fmt!("%- 5d", 1), ~" 1 "); test(fmt!("%- 5d", -1), ~"-1 "); test(fmt!("%-+5d", 1), ~"+1 "); test(fmt!("%-+5d", -1), ~"-1 "); test(fmt!("%- 05d", 1), ~" 1 "); test(fmt!("%- 05d", -1), ~"-1 "); test(fmt!("%-+05d", 1), ~"+1 "); test(fmt!("%-+05d", -1), ~"-1 "); } fn percent() { let s = fmt!("ab%%cd"); assert!((s == ~"ab%cd")); } fn more_floats() { assert!(~"3.1416" == fmt!("%.4f", 3.14159)); assert!(~"3" == fmt!("%.0f", 3.14159)); assert!(~"99" == fmt!("%.0f", 98.5)); assert!(~"7.0000" == fmt!("%.4f", 6.999999999)); assert!(~"3.141590000" == fmt!("%.9f", 3.14159)); }<|fim▁end|>
// Plus overrides space
<|file_name|>sort_files_by_rcp.py<|end_file_name|><|fim▁begin|>import os, glob, shutil from pathos import multiprocessing as mp import pandas as pd import numpy as np base_path = '/Data/malindgren/cru_november_final/IEM/ar5' output_base_path = '/Data/malindgren/cru_november_final/IEM/ar5' models = [ 'IPSL-CM5A-LR', 'GISS-E2-R', 'MRI-CGCM3', 'CCSM4', 'GFDL-CM3' ] # variables = ['rsds', 'vap' ] for model in models: variables = os.listdir( os.path.join( base_path, model ) ) _ = [ os.makedirs( os.path.join( base_path, model, variable ) ) for variable in variables if not os.path.exists( os.path.join( base_path, model, variable ) ) ] for variable in variables: print( ' '.join([model, variable]) ) output_path = os.path.join( output_base_path, model, variable, 'downscaled' ) cur_path = os.path.join( base_path, model, variable, 'downscaled' ) l = pd.Series( glob.glob( os.path.join( cur_path, '*.tif' ) ) ) grouper = [ os.path.basename(i).split( '_' )[ 5 ] for i in l ] rcp_groups = l.groupby( grouper ) name_group = [ group for group in rcp_groups ] names = [ i[0] for i in name_group ] _ = [ os.makedirs( os.path.join( output_path, name ) ) for name in names if not os.path.exists( os.path.join( output_path, name ) ) ] for count, name in enumerate( names ): print count group = name_group[ count ] out_group = [ os.path.join( output_path, name, os.path.basename( i ) ) for i in group[1] ] def run( x, y ): import shutil return shutil.move( x, y ) pool = mp.Pool( 15 ) out = pool.map( lambda x: run(x[0], x[1]), zip( group[1], out_group ) )<|fim▁hole|><|fim▁end|>
pool.close()
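The script above groups each model's downscaled rasters by the RCP token sitting at position 5 of the underscore-split basename, then moves every group into its own folder in parallel. The grouping step on its own, using only the standard library and hypothetical file names that follow the same scheme:

from collections import defaultdict
import os

files = [
    "tas_mean_C_ar5_CCSM4_rcp45_01_2006.tif",   # hypothetical names, invented for the sketch
    "tas_mean_C_ar5_CCSM4_rcp85_01_2006.tif",
]

groups = defaultdict(list)
for f in files:
    rcp = os.path.basename(f).split("_")[5]     # e.g. "rcp45"
    groups[rcp].append(f)

# groups now maps {"rcp45": [...], "rcp85": [...]}, one output folder per key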
<|file_name|>MetaCycExtracter.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Manchester Centre for Integrative Systems Biology * University of Manchester * Manchester M1 7ND * United Kingdom * * Copyright (C) 2008 University of Manchester * * This program is released under the Academic Free License ("AFL") v3.0. * (http://www.opensource.org/licenses/academic.php) *******************************************************************************/ package org.mcisb.subliminal.metacyc; import java.io.*; import java.util.*; import org.mcisb.subliminal.*; import org.mcisb.subliminal.model.*; import org.mcisb.subliminal.sbml.*; import org.sbml.jsbml.*; /** * @author Neil Swainston */ public class MetaCycExtracter extends Extracter { /** * */ private final static String COMPARTMENT_SUFFIX = "_CCO_"; //$NON-NLS-1$ /** * */ private final static String MXN_REF_PREFIX = "metacyc:"; //$NON-NLS-1$ /** * * @param taxonomyId * @param outFile * @param metaCycDirectory * @throws Exception */ public static void run( final String taxonomyId, final File outFile, final File metaCycDirectory ) throws Exception { final String taxonomyName = SubliminalUtils.getTaxonomyName( taxonomyId ); if( taxonomyName == null ) { throw new UnsupportedOperationException( "MetaCyc data unavailable for NCBI Taxonomy id " + taxonomyId ); //$NON-NLS-1$ } final SBMLDocument document = initDocument( taxonomyId ); run( taxonomyId, taxonomyName, document, metaCycDirectory, false );<|fim▁hole|> /** * * @param taxonomyId * @param outFile * @throws Exception */ public static void run( final String taxonomyId, final File outFile ) throws Exception { final SBMLDocument document = initDocument( taxonomyId ); run( taxonomyId, document ); XmlFormatter.getInstance().write( document, outFile ); SbmlFactory.getInstance().unregister(); } /** * * @param taxonomyId * @param document * @throws Exception */ public static void run( final String taxonomyId, final SBMLDocument document ) throws Exception { final String taxonomyName = SubliminalUtils.getTaxonomyName( taxonomyId ); if( taxonomyName == null ) { throw new UnsupportedOperationException( "MetaCyc data unavailable for NCBI Taxonomy id " + taxonomyId ); //$NON-NLS-1$ } final File tempDirectory = new File( System.getProperty( "java.io.tmpdir" ) ); //$NON-NLS-1$ run( taxonomyId, taxonomyName, document, new File( tempDirectory, taxonomyName ), true ); } /** * * @param taxonomyId * @param document * @param metaCycDirectory * @throws Exception */ private static void run( final String taxonomyId, final String taxonomyName, final SBMLDocument document, final File metaCycDirectory, final boolean deleteSource ) throws Exception { try { final File metaCycSource = MetaCycDownloader.getMetaCycSource( metaCycDirectory, taxonomyName ); final File sbml = SubliminalUtils.find( metaCycSource, "metabolic-reactions.sbml" ); //$NON-NLS-1$ if( sbml != null ) { System.out.println( "MetaCyc: " + taxonomyName ); //$NON-NLS-1$ final MetaCycFactory metaCycFactory = initFactory( metaCycSource ); final SBMLDocument inDocument = new SBMLReader().readSBML( sbml ); final Model inModel = inDocument.getModel(); for( int l = 0; l < inModel.getNumReactions(); l++ ) { final Reaction inReaction = inModel.getReaction( l ); final Reaction outReaction = addReaction( document.getModel(), inReaction, taxonomyId, metaCycFactory ); if( inReaction.isSetReversible() ) { outReaction.setReversible( inReaction.getReversible() ); } } final Collection<Object[]> resources = new 
ArrayList<>(); resources.add( new Object[] { "http://identifiers.org/biocyc/" + metaCycFactory.getOrganismId(), CVTerm.Qualifier.BQB_IS_DESCRIBED_BY } ); //$NON-NLS-1$ resources.add( new Object[] { "http://identifiers.org/pubmed/10592180", CVTerm.Qualifier.BQB_IS_DESCRIBED_BY } ); //$NON-NLS-1$ addResources( inModel, resources ); } if( deleteSource ) { SubliminalUtils.delete( metaCycSource ); } } catch( FileNotFoundException e ) { e.printStackTrace(); } } /** * * @param source * @return MetaCycReactionsParser */ private static MetaCycFactory initFactory( final File source ) { final File versionFile = SubliminalUtils.find( source, "version.dat" ); //$NON-NLS-1$ final File reactionsFile = SubliminalUtils.find( source, "reactions.dat" ); //$NON-NLS-1$ final File enzymesFile = SubliminalUtils.find( source, "enzymes.col" ); //$NON-NLS-1$ return new MetaCycFactory( versionFile, reactionsFile, enzymesFile ); } /** * * @param outModel * @param inReaction * @param taxonomyId * @param metaCycEnzymeFactory * @param resources * @return Reaction * @throws Exception */ private static Reaction addReaction( final Model outModel, final Reaction inReaction, final String taxonomyId, final MetaCycFactory metaCycEnzymeFactory ) throws Exception { final String inReactionId = inReaction.getId(); Reaction outReaction = addReaction( outModel, getId( inReactionId ), DEFAULT_COMPARTMENT_ID ); if( outReaction == null ) { outReaction = outModel.createReaction(); outReaction.setId( inReactionId ); outReaction.setName( inReaction.getName() ); for( int l = 0; l < inReaction.getNumReactants(); l++ ) { final SpeciesReference inReactant = inReaction.getReactant( l ); final SpeciesReference outReactant = outReaction.createReactant(); final String speciesId = inReactant.getSpecies(); final Species outSpecies = addSpecies( outModel, getId( speciesId ), inReaction.getModel().getSpecies( speciesId ).getName(), DEFAULT_COMPARTMENT_ID, SubliminalUtils.SBO_SIMPLE_CHEMICAL ); outReactant.setSpecies( outSpecies.getId() ); outReactant.setStoichiometry( inReactant.getStoichiometry() ); } for( int l = 0; l < inReaction.getNumProducts(); l++ ) { final SpeciesReference inProduct = inReaction.getProduct( l ); final SpeciesReference outProduct = outReaction.createProduct(); final String speciesId = inProduct.getSpecies(); final Species outSpecies = addSpecies( outModel, getId( speciesId ), inReaction.getModel().getSpecies( speciesId ).getName(), DEFAULT_COMPARTMENT_ID, SubliminalUtils.SBO_SIMPLE_CHEMICAL ); outProduct.setSpecies( outSpecies.getId() ); outProduct.setStoichiometry( inProduct.getStoichiometry() ); } } final Map<String,Integer> enzymes = metaCycEnzymeFactory.getEnzymes( inReactionId ); final String[] enzymeIds = enzymes.keySet().toArray( new String[ enzymes.keySet().size() ] ); for( String enzymeId : enzymeIds ) { final String formattedEnzymeId = "MetaCyc:" + MetaCycUtils.unencode( enzymeId ); //$NON-NLS-1$ final List<String[]> results = SubliminalUtils.searchUniProt( SubliminalUtils.encodeUniProtSearchTerm( formattedEnzymeId ) + "+AND+taxonomy:" + taxonomyId );//$NON-NLS-1$ addEnzymes( outReaction, results, SubliminalUtils.getNormalisedId( formattedEnzymeId ), enzymeId, new ArrayList<Object[]>() ); } return outReaction; } /** * * @param id * @return String */ private static String getId( final String id ) { String formattedId = id; if( formattedId.contains( COMPARTMENT_SUFFIX ) ) { formattedId = formattedId.substring( 0, id.indexOf( COMPARTMENT_SUFFIX ) ); } return MXN_REF_PREFIX + MetaCycUtils.unencode( formattedId ); } /** * 
@param args * @throws Exception */ public static void main( String[] args ) throws Exception { if( args.length == 2 ) { MetaCycExtracter.run( args[ 0 ], new File( args[ 1 ] ) ); } else if( args.length == 3 ) { MetaCycExtracter.run( args[ 0 ], new File( args[ 1 ] ), new File( args[ 2 ] ) ); } } }<|fim▁end|>
XmlFormatter.getInstance().write( document, outFile );
SbmlFactory.getInstance().unregister();
}
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 by pyramid_decoy authors and contributors # <see AUTHORS file> # # This module is part of pyramid_decoy and is released under # the MIT License (MIT): http://opensource.org/licenses/MIT """Main decoy module.""" __version__ = "0.2.0" SETTINGS_PREFIX = "decoy" def includeme(configurator): """ Configure decoy plugin on pyramid application. :param pyramid.configurator.Configurator configurator: pyramid's configurator object """ configurator.registry["decoy"] = get_decoy_settings( configurator.get_settings() ) configurator.add_route("decoy", pattern="/*p") configurator.add_view("pyramid_decoy.views.decoy", route_name="decoy") def get_decoy_settings(settings): """<|fim▁hole|> :param dict settings: pyramid app settings :returns: decoy settings :rtype: dict """ return { k.split(".", 1)[-1]: v for k, v in settings.items() if k[: len(SETTINGS_PREFIX)] == SETTINGS_PREFIX }<|fim▁end|>
Extract decoy settings out of all settings.
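The completion above is the docstring of get_decoy_settings, whose body (visible in the prompt) keeps only the keys carrying the decoy prefix and strips that prefix off. The same pattern as a standalone sketch:

SETTINGS_PREFIX = "decoy"

def extract_prefixed(settings, prefix=SETTINGS_PREFIX):
    # keep keys such as "decoy.url" and return them as {"url": ...}
    return {
        k.split(".", 1)[-1]: v
        for k, v in settings.items()
        if k[: len(prefix)] == prefix
    }

assert extract_prefixed({"decoy.url": "http://example.com", "other": 1}) == {"url": "http://example.com"}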
<|file_name|>config.ts<|end_file_name|><|fim▁begin|>export const constObjConfig = {<|fim▁hole|>}<|fim▁end|>
"assets": "https://d1fyaep5g8hhgq.cloudfront.net/web/tmjsleeptherapycentre"
<|file_name|>bind-shim.js<|end_file_name|><|fim▁begin|>// PhantomJS is missing Function.prototype.bind: // http://code.google.com/p/phantomjs/issues/detail?id=522 if (!Function.prototype.bind) { Function.prototype.bind = function (oThis) { if (typeof this !== "function") { // closest thing possible to the ECMAScript 5 internal IsCallable function throw new TypeError("Function.prototype.bind - what is trying to be bound is not callable"); } var aArgs = Array.prototype.slice.call(arguments, 1), fToBind = this, fNOP = function () {}, fBound = function () { return fToBind.apply(this instanceof fNOP && oThis ? this : oThis, aArgs.concat(Array.prototype.slice.call(arguments)));<|fim▁hole|> fBound.prototype = new fNOP(); return fBound; }; }<|fim▁end|>
};
fNOP.prototype = this.prototype;
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main import ( "log" "net/http" "github.com/sirupsen/logrus" "github.com/stripe/smokescreen/cmd" "github.com/stripe/smokescreen/pkg/smokescreen" ) // This default implementation of RoleFromRequest uses the CommonName of the // client's certificate. If no certificate is provided, the AllowMissingRole // configuration option will control whether the request is rejected, or the // default ACL is applied. func defaultRoleFromRequest(req *http.Request) (string, error) { if req.TLS == nil { return "", smokescreen.MissingRoleError("defaultRoleFromRequest requires TLS") } if len(req.TLS.PeerCertificates) == 0 { return "", smokescreen.MissingRoleError("client did not provide certificate") } return req.TLS.PeerCertificates[0].Subject.CommonName, nil }<|fim▁hole|> if err != nil { logrus.Fatalf("Could not create configuration: %v", err) } else if conf != nil { conf.RoleFromRequest = defaultRoleFromRequest conf.Log.Formatter = &logrus.JSONFormatter{} adapter := &smokescreen.Log2LogrusWriter{ Entry: conf.Log.WithField("stdlog", "1"), } // Set the standard logger to use our logger's writer as output. log.SetOutput(adapter) log.SetFlags(0) smokescreen.StartWithConfig(conf, nil) } // Otherwise, --help or --version was passed and handled by NewConfiguration, so do nothing }<|fim▁end|>
func main() {
	conf, err := cmd.NewConfiguration(nil, nil)
<|file_name|>account.rs<|end_file_name|><|fim▁begin|>extern crate meg; use std::env; use std::clone::Clone; use turbo::util::{CliResult, Config};<|fim▁hole|> #[derive(RustcDecodable, Clone)] pub struct Options { pub arg_email: String, pub flag_show: bool, } pub const USAGE: &'static str = " Usage: meg account [options] [<email>] Options: -h, --help Print this message --create Provide an email to create a new account --show Provide an email to show the account -v, --verbose Use verbose output "; pub fn execute(options: Options, _: &Config) -> CliResult<Option<()>> { let vec = env::args().collect::<Vec<_>>(); for x in vec.iter() { if x == "--create" { let mut acct: Act::Createoptions = Act::CreateAcc::new(); acct.email = options.arg_email.clone(); let x = acct.create(); } else if x == "--show" { let mut acct: Show::Showoptions = Show::ShowAcc::new(); //Not reqd - to expand later if acct.email = options.arg_email.clone(); //multiple accounts needs to be showed let x = acct.show(); } } return Ok(None) }<|fim▁end|>
use self::meg::ops::meg_account_create as Act;
use self::meg::ops::meg_account_show as Show;
<|file_name|>test_option_value_container.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). <|fim▁hole|>import unittest from pants.option.option_value_container import OptionValueContainer from pants.option.ranked_value import RankedValue class OptionValueContainerTest(unittest.TestCase): def test_standard_values(self): o = OptionValueContainer() o.foo = 1 self.assertEqual(1, o.foo) with self.assertRaises(AttributeError): o.bar def test_value_ranking(self): o = OptionValueContainer() o.foo = RankedValue(RankedValue.CONFIG, 11) self.assertEqual(11, o.foo) self.assertEqual(RankedValue.CONFIG, o.get_rank('foo')) o.foo = RankedValue(RankedValue.HARDCODED, 22) self.assertEqual(11, o.foo) self.assertEqual(RankedValue.CONFIG, o.get_rank('foo')) o.foo = RankedValue(RankedValue.ENVIRONMENT, 33) self.assertEqual(33, o.foo) self.assertEqual(RankedValue.ENVIRONMENT, o.get_rank('foo')) o.foo = 44 # No explicit rank is assumed to be a FLAG. self.assertEqual(44, o.foo) self.assertEqual(RankedValue.FLAG, o.get_rank('foo')) def test_is_flagged(self): o = OptionValueContainer() o.foo = RankedValue(RankedValue.NONE, 11) self.assertFalse(o.is_flagged('foo')) o.foo = RankedValue(RankedValue.CONFIG, 11) self.assertFalse(o.is_flagged('foo')) o.foo = RankedValue(RankedValue.ENVIRONMENT, 11) self.assertFalse(o.is_flagged('foo')) o.foo = RankedValue(RankedValue.FLAG, 11) self.assertTrue(o.is_flagged('foo')) def test_indexing(self): o = OptionValueContainer() o.foo = 1 self.assertEqual(1, o['foo']) self.assertEqual(1, o.get('foo')) self.assertEqual(1, o.get('foo', 2)) self.assertIsNone(o.get('unknown')) self.assertEqual(2, o.get('unknown', 2)) with self.assertRaises(AttributeError): o['bar'] def test_iterator(self): o = OptionValueContainer() o.a = 3 o.b = 2 o.c = 1 names = list(iter(o)) self.assertListEqual(['a', 'b', 'c'], names) def test_copy(self): # copy semantics can get hairy when overriding __setattr__/__getattr__, so we test them. o = OptionValueContainer() o.foo = 1 o.bar = {'a': 111} p = copy.copy(o) # Verify that the result is in fact a copy. self.assertEqual(1, p.foo) # Has original attribute. o.baz = 42 self.assertFalse(hasattr(p, 'baz')) # Does not have attribute added after the copy. # Verify that it's a shallow copy by modifying a referent in o and reading it in p. o.bar['b'] = 222 self.assertEqual({'a': 111, 'b': 222}, p.bar) def test_deepcopy(self): # copy semantics can get hairy when overriding __setattr__/__getattr__, so we test them. o = OptionValueContainer() o.foo = 1 o.bar = {'a': 111} p = copy.deepcopy(o) # Verify that the result is in fact a copy. self.assertEqual(1, p.foo) # Has original attribute. o.baz = 42 self.assertFalse(hasattr(p, 'baz')) # Does not have attribute added after the copy. # Verify that it's a deep copy by modifying a referent in o and reading it in p. o.bar['b'] = 222 self.assertEqual({'a': 111}, p.bar)<|fim▁end|>
from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement)

import copy
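The test file above exercises rank-based precedence: once a value has been recorded at a given rank (HARDCODED < CONFIG < ENVIRONMENT < FLAG), a later write only takes effect if it is not outranked. As a rough standalone illustration of that idea — not the actual pants implementation — a minimal slot could look like this (tie handling is an assumption):

HARDCODED, CONFIG, ENVIRONMENT, FLAG = range(4)   # illustrative ranks only

class RankedSlot:
    def __init__(self):
        self._rank = -1
        self.value = None

    def set(self, value, rank=FLAG):
        # writes that are outranked by the current value are ignored
        if rank >= self._rank:
            self._rank = rank
            self.value = value

slot = RankedSlot()
slot.set(11, CONFIG)
slot.set(22, HARDCODED)    # lower rank: ignored, value stays 11
slot.set(33, ENVIRONMENT)  # higher rank: wins
assert slot.value == 33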
<|file_name|>sensible-app.js<|end_file_name|><|fim▁begin|>// sensible server which advertises itself via Bonjour // NODE INCLUDES var dgram = require ("dgram"); var fs = require ("fs"); var http = require ("http"); var os = require ("os"); var url = require ("url"); // REGULAR JS INCLUDES <|fim▁hole|>eval (code.toString ()); // MAINLINE sensible.ApplicationFactory.createApplication ( function (inError) { if (inError) { console.error ("error during sensible application startup"); console.error (inError); } else { console.log ("sensible application startup"); } } ); // called just before sensible.Application.start() sensible.node.Application.prototype.onBeforeStart = function (inCallback) { console.log ("node.Application.onBeforeStart()"); inCallback (); } // called just after sensible.Application.start() sensible.node.Application.prototype.onAfterStart = function (inCallback) { console.log ("node.Application.onAfterStart()"); inCallback (); }<|fim▁end|>
// assume that sensible.js lives in the same directory as our mainline
var code = fs.readFileSync (require ("path").dirname (process.argv [1]) + "/sensible.js");
<|file_name|>SirenFieldAttribute.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 fjz13. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. #include "MedusaCorePreCompiled.h" #include "SirenFieldAttribute.h" #include "Core/IO/Stream/IStream.h" #include "Core/Log/Log.h" MEDUSA_BEGIN; SirenFieldAttribute::~SirenFieldAttribute(void) { } bool SirenFieldAttribute::IsRequired() const { return !MEDUSA_FLAG_HAS(mMode, SirenFieldGenerateMode::Optional); } bool SirenFieldAttribute::OnLoaded() { StringPropertySet copy = mKeyValues; if (mKeyValues.RemoveKey("Optional")) { MEDUSA_FLAG_ADD(mMode, SirenFieldGenerateMode::Optional); } if (mKeyValues.RemoveKey("?")) { MEDUSA_FLAG_ADD(mMode, SirenFieldGenerateMode::Optional);<|fim▁hole|> } if (mKeyValues.RemoveKey("+")) { MEDUSA_FLAG_REMOVE(mMode, SirenFieldGenerateMode::Optional); } if (mKeyValues.RemoveKey("ForceKeyToPtr")) { MEDUSA_FLAG_ADD(mMode, SirenFieldGenerateMode::ForceKeyToPtr); } if (mKeyValues.RemoveKey("ForceValueToPtr")) { MEDUSA_FLAG_ADD(mMode, SirenFieldGenerateMode::ForceValueToPtr); } if (mKeyValues.RemoveKey("AddDictionaryMethods")) { MEDUSA_FLAG_ADD(mMode, SirenFieldGenerateMode::AddDictionaryMethods); } if (mKeyValues.RemoveKey("SuppressMethod")) { MEDUSA_FLAG_ADD(mMode, SirenFieldGenerateMode::SuppressMethod); } return true; } StringRef SirenFieldAttribute::Modifier() const { if (IsRequired()) { return "Required"; } return "Optional"; } bool SirenFieldAttribute::LoadFrom(IStream& stream) { RETURN_FALSE_IF_FALSE(ISirenAttribute::LoadFrom(stream)); mMode = stream.Read<SirenFieldGenerateMode>(); return true; } bool SirenFieldAttribute::SaveTo(IStream& stream) const { RETURN_FALSE_IF_FALSE(ISirenAttribute::SaveTo(stream)); stream.Write(mMode); return true; } MEDUSA_END;<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>""" This module instructs the setuptools to setpup this package properly :copyright: (c) 2016 by Mehdy Khoshnoody. :license: GPLv3, see LICENSE for more details. """ import os from distutils.core import setup<|fim▁hole|> packages=['pyeez'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], keywords='terminal console', url='https://github.com/mehdy/pyeez', license='GPLv3', author='Mehdy Khoshnoody', author_email='[email protected]', description='A micro-framework to create console-based applications like' 'htop, vim and etc' )<|fim▁end|>
setup( name='pyeez', version='0.1.0',
<|file_name|>dice.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import random<|fim▁hole|>def roll(qty, sides): return sum(random.randrange(1, sides + 1) for _ in range(qty)) def chances(qty, out_of): """'1 chance in 5' is chances(1, 5)""" # Chance succeeds if roll is lower than max. return roll(1, out_of) <= qty<|fim▁end|>
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. //! Common utilities for computation kernels. use crate::array::*; use crate::bitmap::Bitmap; use crate::buffer::Buffer; use crate::error::Result; /// Applies a given binary operation, `op`, to two references to `Option<Bitmap>`'s. /// /// This function is useful when implementing operations on higher level arrays. pub(crate) fn apply_bin_op_to_option_bitmap<F>( left: &Option<Bitmap>, right: &Option<Bitmap>, op: F, ) -> Result<Option<Buffer>> where F: Fn(&Buffer, &Buffer) -> Result<Buffer>, { match *left { None => match *right { None => Ok(None), Some(ref r) => Ok(Some(r.bits.clone())), }, Some(ref l) => match *right { None => Ok(Some(l.bits.clone())), Some(ref r) => Ok(Some(op(&l.bits, &r.bits)?)), }, } } /// Takes/filters a list array's inner data using the offsets of the list array. /// /// Where a list array has indices `[0,2,5,10]`, taking indices of `[2,0]` returns /// an array of the indices `[5..10, 0..2]` and offsets `[0,5,7]` (5 elements and 2 /// elements) pub(super) fn take_value_indices_from_list( values: &ArrayRef, indices: &UInt32Array, ) -> (UInt32Array, Vec<i32>) { // TODO: benchmark this function, there might be a faster unsafe alternative // get list array's offsets let list: &ListArray = values.as_any().downcast_ref::<ListArray>().unwrap(); let offsets: Vec<u32> = (0..=list.len()) .map(|i| list.value_offset(i) as u32) .collect(); let mut new_offsets = Vec::with_capacity(indices.len()); let mut values = Vec::new(); let mut current_offset = 0; // add first offset new_offsets.push(0); // compute the value indices, and set offsets accordingly for i in 0..indices.len() { if indices.is_valid(i) { let ix = indices.value(i) as usize; let start = offsets[ix]; let end = offsets[ix + 1]; current_offset += (end - start) as i32; new_offsets.push(current_offset); // if start == end, this slot is empty if start != end { // type annotation needed to guide compiler a bit let mut offsets: Vec<Option<u32>> = (start..end).map(|v| Some(v)).collect::<Vec<Option<u32>>>(); values.append(&mut offsets); } } else { new_offsets.push(current_offset); } } (UInt32Array::from(values), new_offsets) } <|fim▁hole|> use super::*; use std::sync::Arc; use crate::array::ArrayData; use crate::datatypes::{DataType, ToByteSlice}; #[test] fn test_apply_bin_op_to_option_bitmap() { assert_eq!( Ok(None), apply_bin_op_to_option_bitmap(&None, &None, |a, b| a & b) ); assert_eq!( Ok(Some(Buffer::from([0b01101010]))), apply_bin_op_to_option_bitmap( &Some(Bitmap::from(Buffer::from([0b01101010]))), &None, |a, b| a & b ) ); assert_eq!( Ok(Some(Buffer::from([0b01001110]))), apply_bin_op_to_option_bitmap( &None, &Some(Bitmap::from(Buffer::from([0b01001110]))), |a, b| a & b ) ); assert_eq!( 
Ok(Some(Buffer::from([0b01001010]))), apply_bin_op_to_option_bitmap( &Some(Bitmap::from(Buffer::from([0b01101010]))), &Some(Bitmap::from(Buffer::from([0b01001110]))), |a, b| a & b ) ); } #[test] fn test_take_value_index_from_list() { let value_data = Int32Array::from((0..10).collect::<Vec<i32>>()).data(); let value_offsets = Buffer::from(&[0, 2, 5, 10].to_byte_slice()); let list_data_type = DataType::List(Box::new(DataType::Int32)); let list_data = ArrayData::builder(list_data_type.clone()) .len(3) .add_buffer(value_offsets.clone()) .add_child_data(value_data.clone()) .build(); let array = Arc::new(ListArray::from(list_data)) as ArrayRef; let index = UInt32Array::from(vec![2, 0]); let (indexed, offsets) = take_value_indices_from_list(&array, &index); assert_eq!(vec![0, 5, 7], offsets); let data = UInt32Array::from(vec![ Some(5), Some(6), Some(7), Some(8), Some(9), Some(0), Some(1), ]) .data(); assert_eq!(data, indexed.data()); } }<|fim▁end|>
#[cfg(test)] mod tests {
<|file_name|>hparams_lib_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 Verily Life Sciences LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for hparams_lib.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from classifaedes import hparams_lib import tensorflow.compat.v1 as tf<|fim▁hole|> class HparamsLibTest(tf.test.TestCase): def testIndentedSerialize(self): """Tests that our slightly customized serialization can be parsed. hparams_lib._human_serialize() uses indented JSON to improve readability. """ hps1 = hparams_lib.defaults() serialized = hparams_lib._human_serialize(hps1) hps2 = hparams_lib.defaults() hps2.parse_json(serialized) self.assertDictEqual(hps1.values(), hps2.values()) if __name__ == '__main__': tf.test.main()<|fim▁end|>
<|file_name|>facility.py<|end_file_name|><|fim▁begin|>from chronotope.model.base import PublicationWorkflowBehavior from chronotope.model.base import SQLBase from chronotope.model.category import CategoryRecord from chronotope.model.location import LocationRecord from chronotope.utils import ensure_uuid from chronotope.utils import html_index_transform from cone.app.model import Metadata from cone.app.model import Properties from cone.app.model import node_info from cone.sql import get_session from cone.sql import metadata from cone.sql.model import GUID from cone.sql.model import SQLRowNode from cone.sql.model import SQLTableNode from node.utils import instance_property from plumber import plumbing from pyramid.i18n import TranslationStringFactory from sqlalchemy import Column from sqlalchemy import DateTime from sqlalchemy import ForeignKey from sqlalchemy import String from sqlalchemy import Table<|fim▁hole|> _ = TranslationStringFactory('chronotope') facility_location_references = Table( 'facility_location_references', metadata, Column('facility_uid', GUID, ForeignKey('facility.uid')), Column('location_uid', GUID, ForeignKey('location.uid')) ) facility_category_references = Table( 'facility_category_references', metadata, Column('facility_uid', GUID, ForeignKey('facility.uid')), Column('category_uid', GUID, ForeignKey('category.uid')) ) class FacilityRecord(SQLBase): __tablename__ = 'facility' __index_attrs__ = ['title', 'description'] __index_transforms__ = { 'description': html_index_transform, } uid = Column(GUID, primary_key=True) submitter = Column(String) creator = Column(String) created = Column(DateTime) modified = Column(DateTime) state = Column(String) title = Column(String) description = Column(String) exists_from = Column(String) exists_to = Column(String) category = relationship( CategoryRecord, secondary=facility_category_references, backref='facility') location = relationship( LocationRecord, secondary=facility_location_references, backref='facility') def facility_by_uid(request, uid): session = get_session(request) return session.query(FacilityRecord).get(ensure_uuid(uid)) def facilities_by_uid(request, uids): if not uids: return list() uids = [ensure_uuid(uid) for uid in uids] session = get_session(request) return session.query(FacilityRecord)\ .filter(FacilityRecord.uid.in_(uids))\ .all() def search_facilities(request, term, state=[], submitter=None, limit=None): session = get_session(request) query = session.query(FacilityRecord) query = query.filter(FacilityRecord.title.like(u'%{0}%'.format(term))) if state: query = query.filter(FacilityRecord.state.in_(state)) if submitter: query = query.filter(FacilityRecord.submitter == submitter) query = query.order_by(FacilityRecord.title) if limit is not None: query = query.limit(limit) return query.all() @node_info( name='facility', title=_('facility_label', default='Facility'), description=_('facility_description', default='A Facility'), icon='glyphicon glyphicon-home') @plumbing(PublicationWorkflowBehavior) class Facility(SQLRowNode): record_class = FacilityRecord @instance_property def properties(self): props = super(Facility, self).properties props.action_up = True props.action_up_tile = 'listing' props.action_view = True props.action_edit = True props.action_delete = True return props @property def metadata(self): md = Metadata() md.title = self.attrs['title'] md.description = self.attrs['description'] md.creator = self.attrs['creator'] md.created = self.attrs['created'] md.modified = self.attrs['modified'] return md 
@node_info( name='facilities', title=_('facilities_label', default='Facilities'), description=_( 'facilities_description', default='Container for Facilities' ), icon='glyphicon glyphicon-record', addables=['facility']) class Facilities(SQLTableNode): record_class = FacilityRecord child_factory = Facility @instance_property def properties(self): props = Properties() props.in_navtree = True props.action_up = True props.action_up_tile = 'content' props.action_add = True props.default_content_tile = 'listing' return props @instance_property def metadata(self): md = Metadata() md.title = _('facilities_label', default='Facilities') md.description = _( 'facilities_description', default='Container for Facilities' ) return md<|fim▁end|>
from sqlalchemy.orm import relationship
<|file_name|>commit_status.go<|end_file_name|><|fim▁begin|>// Copyright 2019 The Gitea Authors. // All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. package pull import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/structs" "github.com/pkg/errors" ) // MergeRequiredContextsCommitStatus returns a commit status state for given required contexts func MergeRequiredContextsCommitStatus(commitStatuses []*models.CommitStatus, requiredContexts []string) structs.CommitStatusState { if len(requiredContexts) == 0 { status := models.CalcCommitStatus(commitStatuses) if status != nil { return status.State } return structs.CommitStatusSuccess } var returnedStatus = structs.CommitStatusSuccess for _, ctx := range requiredContexts { var targetStatus structs.CommitStatusState for _, commitStatus := range commitStatuses { if commitStatus.Context == ctx { targetStatus = commitStatus.State break } } if targetStatus == "" { targetStatus = structs.CommitStatusPending commitStatuses = append(commitStatuses, &models.CommitStatus{ State: targetStatus, Context: ctx, Description: "Pending", }) } if targetStatus.NoBetterThan(returnedStatus) { returnedStatus = targetStatus } } return returnedStatus } // IsCommitStatusContextSuccess returns true if all required status check contexts succeed. func IsCommitStatusContextSuccess(commitStatuses []*models.CommitStatus, requiredContexts []string) bool { // If no specific context is required, require that last commit status is a success if len(requiredContexts) == 0 { status := models.CalcCommitStatus(commitStatuses) if status == nil || status.State != structs.CommitStatusSuccess { return false } return true } for _, ctx := range requiredContexts { var found bool for _, commitStatus := range commitStatuses { if commitStatus.Context == ctx { if commitStatus.State != structs.CommitStatusSuccess { return false } found = true break } } if !found { return false } } return true } // IsPullCommitStatusPass returns if all required status checks PASS func IsPullCommitStatusPass(pr *models.PullRequest) (bool, error) { if err := pr.LoadProtectedBranch(); err != nil { return false, errors.Wrap(err, "GetLatestCommitStatus")<|fim▁hole|> return true, nil } state, err := GetPullRequestCommitStatusState(pr) if err != nil { return false, err } return state.IsSuccess(), nil } // GetPullRequestCommitStatusState returns pull request merged commit status state func GetPullRequestCommitStatusState(pr *models.PullRequest) (structs.CommitStatusState, error) { // Ensure HeadRepo is loaded if err := pr.LoadHeadRepo(); err != nil { return "", errors.Wrap(err, "LoadHeadRepo") } // check if all required status checks are successful headGitRepo, err := git.OpenRepository(pr.HeadRepo.RepoPath()) if err != nil { return "", errors.Wrap(err, "OpenRepository") } defer headGitRepo.Close() if !headGitRepo.IsBranchExist(pr.HeadBranch) { return "", errors.New("Head branch does not exist, can not merge") } sha, err := headGitRepo.GetBranchCommitID(pr.HeadBranch) if err != nil { return "", errors.Wrap(err, "GetBranchCommitID") } if err := pr.LoadBaseRepo(); err != nil { return "", errors.Wrap(err, "LoadBaseRepo") } commitStatuses, err := models.GetLatestCommitStatus(pr.BaseRepo, sha, 0) if err != nil { return "", errors.Wrap(err, "GetLatestCommitStatus") } return MergeRequiredContextsCommitStatus(commitStatuses, pr.ProtectedBranch.StatusCheckContexts), nil }<|fim▁end|>
} if pr.ProtectedBranch == nil || !pr.ProtectedBranch.EnableStatusCheck {
<|file_name|>unboxed-closure-sugar-wrong-number-number-type-parameters.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(unboxed_closures)] trait Zero { fn dummy(&self); } fn foo(_: Zero()) //~^ ERROR wrong number of type arguments //~| ERROR no associated type `Output` defined in `Zero` {} fn main() { }<|fim▁end|>
// http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
<|file_name|>helper.py<|end_file_name|><|fim▁begin|>from __future__ import division from __future__ import unicode_literals from builtins import range from past.utils import old_div import hashlib import os import random import string import tempfile import re import time import urllib from datetime import datetime from datetime import timedelta from elodie.compatability import _rename from elodie.external.pyexiftool import ExifTool from elodie.dependencies import get_exiftool from elodie import constants def checksum(file_path, blocksize=65536): hasher = hashlib.sha256() with open(file_path, 'rb') as f: buf = f.read(blocksize) while len(buf) > 0: hasher.update(buf) buf = f.read(blocksize) return hasher.hexdigest() return None def create_working_folder(format=None): temporary_folder = tempfile.gettempdir() folder = os.path.join(temporary_folder, random_string(10, format), random_string(10, format)) os.makedirs(folder) return (temporary_folder, folder) def download_file(name, destination): try: url_to_file = 'https://s3.amazonaws.com/jmathai/github/elodie/{}'.format(name) # urlretrieve works differently for python 2 and 3 if constants.python_version < 3: final_name = '{}/{}{}'.format(destination, random_string(10), os.path.splitext(name)[1]) urllib.urlretrieve( url_to_file, final_name ) else: final_name, headers = urllib.request.urlretrieve(url_to_file) return final_name except Exception as e: return False def get_file(name): file_path = get_file_path(name) if not os.path.isfile(file_path): return False return file_path def get_file_path(name): current_folder = os.path.dirname(os.path.realpath(__file__)) return os.path.join(current_folder, 'files', name) def get_test_location():<|fim▁hole|> def populate_folder(number_of_files, include_invalid=False): folder = '%s/%s' % (tempfile.gettempdir(), random_string(10)) os.makedirs(folder) for x in range(0, number_of_files): ext = 'jpg' if x % 2 == 0 else 'txt' fname = '%s/%s.%s' % (folder, x, ext) with open(fname, 'a'): os.utime(fname, None) if include_invalid: fname = '%s/%s' % (folder, 'invalid.invalid') with open(fname, 'a'): os.utime(fname, None) return folder def random_string(length, format=None): format_choice = string.ascii_uppercase + string.digits if format == 'int': format_choice = string.digits elif format == 'str': format_choice = string.asci_uppercase return ''.join(random.SystemRandom().choice(format_choice) for _ in range(length)) def random_decimal(): return random.random() def random_coordinate(coordinate, precision): # Here we add to the decimal section of the coordinate by a given precision return coordinate + ((old_div(10.0, (10.0**precision))) * random_decimal()) def temp_dir(): return tempfile.gettempdir() def is_windows(): return os.name == 'nt' # path_tz_fix(file_name) # Change timestamp in file_name by the offset # between UTC and local time, i.e. 
# 2015-12-05_00-59-26-with-title-some-title.jpg -> # 2015-12-04_20-59-26-with-title-some-title.jpg # (Windows only) def path_tz_fix(file_name): if is_windows(): # Calculate the offset between UTC and local time tz_shift = old_div((datetime.fromtimestamp(0) - datetime.utcfromtimestamp(0)).seconds,3600) # replace timestamp in file_name m = re.search('(\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2})',file_name) t_date = datetime.fromtimestamp(time.mktime(time.strptime(m.group(0), '%Y-%m-%d_%H-%M-%S'))) s_date_fix = (t_date-timedelta(hours=tz_shift)).strftime('%Y-%m-%d_%H-%M-%S') return re.sub('\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}',s_date_fix,file_name) else: return file_name # time_convert(s_time) # Change s_time (struct_time) by the offset # between UTC and local time # (Windows only) def time_convert(s_time): if is_windows(): return time.gmtime((time.mktime(s_time))) else: return s_time # isclose(a,b,rel_tol) # To compare float coordinates a and b # with relative tolerance c def isclose(a, b, rel_tol = 1e-8): if not isinstance(a, (int, float)) or not isinstance(b, (int, float)): return False diff = abs(a - b) return (diff <= abs(rel_tol * a) and diff <= abs(rel_tol * b)) def reset_dbs(): """ Back up hash_db and location_db """ # This is no longer needed. See gh-322 # https://github.com/jmathai/elodie/issues/322 pass def restore_dbs(): """ Restore back ups of hash_db and location_db """ # This is no longer needed. See gh-322 # https://github.com/jmathai/elodie/issues/322 pass def setup_module(): exiftool_addedargs = [ u'-config', u'"{}"'.format(constants.exiftool_config) ] ExifTool(executable_=get_exiftool(), addedargs=exiftool_addedargs).start() def teardown_module(): ExifTool().terminate<|fim▁end|>
return (61.013710, 99.196656, 'Siberia')
<|file_name|>position.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ <%! from data import to_rust_ident %> <%namespace name="helpers" file="/helpers.mako.rs" /> <% from data import ALL_SIZES, PHYSICAL_SIDES, LOGICAL_SIDES %> <% data.new_style_struct("Position", inherited=False) %> // "top" / "left" / "bottom" / "right" % for side in PHYSICAL_SIDES: ${helpers.predefined_type( side, "LengthOrPercentageOrAuto", "computed::LengthOrPercentageOrAuto::Auto", spec="https://www.w3.org/TR/CSS2/visuren.html#propdef-%s" % side, flags="GETCS_NEEDS_LAYOUT_FLUSH", animation_value_type="ComputedValue",<|fim▁hole|> )} % endfor // inset-* logical properties, map to "top" / "left" / "bottom" / "right" % for side in LOGICAL_SIDES: ${helpers.predefined_type( "inset-%s" % side, "LengthOrPercentageOrAuto", "computed::LengthOrPercentageOrAuto::Auto", spec="https://drafts.csswg.org/css-logical-props/#propdef-inset-%s" % side, flags="GETCS_NEEDS_LAYOUT_FLUSH", alias="offset-%s:layout.css.offset-logical-properties.enabled" % side, animation_value_type="ComputedValue", logical=True, logical_group="inset", )} % endfor #[cfg(feature = "gecko")] macro_rules! impl_align_conversions { ($name: path) => { impl From<u8> for $name { fn from(bits: u8) -> $name { $name(::values::specified::align::AlignFlags::from_bits(bits) .expect("bits contain valid flag")) } } impl From<$name> for u8 { fn from(v: $name) -> u8 { v.0.bits() } } }; } ${helpers.predefined_type( "z-index", "ZIndex", "computed::ZIndex::auto()", spec="https://www.w3.org/TR/CSS2/visuren.html#z-index", flags="CREATES_STACKING_CONTEXT", animation_value_type="ComputedValue", )} // CSS Flexible Box Layout Module Level 1 // http://www.w3.org/TR/css3-flexbox/ // Flex container properties ${helpers.single_keyword("flex-direction", "row row-reverse column column-reverse", spec="https://drafts.csswg.org/css-flexbox/#flex-direction-property", extra_prefixes="webkit", animation_value_type="discrete", servo_restyle_damage = "reflow")} ${helpers.single_keyword("flex-wrap", "nowrap wrap wrap-reverse", spec="https://drafts.csswg.org/css-flexbox/#flex-wrap-property", extra_prefixes="webkit", animation_value_type="discrete", servo_restyle_damage = "reflow")} % if product == "servo": // FIXME: Update Servo to support the same Syntax as Gecko. ${helpers.single_keyword("justify-content", "flex-start stretch flex-end center space-between space-around", extra_prefixes="webkit", spec="https://drafts.csswg.org/css-align/#propdef-justify-content", animation_value_type="discrete", servo_restyle_damage = "reflow")} % else: ${helpers.predefined_type(name="justify-content", type="JustifyContent", initial_value="specified::JustifyContent(specified::ContentDistribution::normal())", spec="https://drafts.csswg.org/css-align/#propdef-justify-content", extra_prefixes="webkit", animation_value_type="discrete", servo_restyle_damage = "reflow")} % endif % if product == "servo": // FIXME: Update Servo to support the same Syntax as Gecko. 
${helpers.single_keyword("align-content", "stretch flex-start flex-end center space-between space-around", extra_prefixes="webkit", spec="https://drafts.csswg.org/css-align/#propdef-align-content", animation_value_type="discrete", servo_restyle_damage = "reflow")} ${helpers.single_keyword("align-items", "stretch flex-start flex-end center baseline", extra_prefixes="webkit", spec="https://drafts.csswg.org/css-flexbox/#align-items-property", animation_value_type="discrete", servo_restyle_damage = "reflow")} % else: ${helpers.predefined_type(name="align-content", type="AlignContent", initial_value="specified::AlignContent(specified::ContentDistribution::normal())", spec="https://drafts.csswg.org/css-align/#propdef-align-content", extra_prefixes="webkit", animation_value_type="discrete", servo_restyle_damage = "reflow")} ${helpers.predefined_type(name="align-items", type="AlignItems", initial_value="specified::AlignItems::normal()", spec="https://drafts.csswg.org/css-align/#propdef-align-items", extra_prefixes="webkit", animation_value_type="discrete", servo_restyle_damage = "reflow")} #[cfg(feature = "gecko")] impl_align_conversions!(::values::specified::align::AlignItems); ${helpers.predefined_type( name="justify-items", type="JustifyItems", initial_value="computed::JustifyItems::legacy()", spec="https://drafts.csswg.org/css-align/#propdef-justify-items", animation_value_type="discrete", )} #[cfg(feature = "gecko")] impl_align_conversions!(::values::specified::align::JustifyItems); % endif // Flex item properties ${helpers.predefined_type("flex-grow", "NonNegativeNumber", "From::from(0.0)", spec="https://drafts.csswg.org/css-flexbox/#flex-grow-property", extra_prefixes="webkit", animation_value_type="NonNegativeNumber", servo_restyle_damage = "reflow")} ${helpers.predefined_type("flex-shrink", "NonNegativeNumber", "From::from(1.0)", spec="https://drafts.csswg.org/css-flexbox/#flex-shrink-property", extra_prefixes="webkit", animation_value_type="NonNegativeNumber", servo_restyle_damage = "reflow")} // https://drafts.csswg.org/css-align/#align-self-property % if product == "servo": // FIXME: Update Servo to support the same syntax as Gecko. 
${helpers.single_keyword("align-self", "auto stretch flex-start flex-end center baseline", extra_prefixes="webkit", spec="https://drafts.csswg.org/css-flexbox/#propdef-align-self", animation_value_type="discrete", servo_restyle_damage = "reflow")} % else: ${helpers.predefined_type(name="align-self", type="AlignSelf", initial_value="specified::AlignSelf(specified::SelfAlignment::auto())", spec="https://drafts.csswg.org/css-align/#align-self-property", extra_prefixes="webkit", animation_value_type="discrete")} ${helpers.predefined_type(name="justify-self", type="JustifySelf", initial_value="specified::JustifySelf(specified::SelfAlignment::auto())", spec="https://drafts.csswg.org/css-align/#justify-self-property", animation_value_type="discrete")} #[cfg(feature = "gecko")] impl_align_conversions!(::values::specified::align::SelfAlignment); % endif // https://drafts.csswg.org/css-flexbox/#propdef-order ${helpers.predefined_type("order", "Integer", "0", extra_prefixes="webkit", animation_value_type="ComputedValue", spec="https://drafts.csswg.org/css-flexbox/#order-property", servo_restyle_damage = "reflow")} ${helpers.predefined_type( "flex-basis", "FlexBasis", "computed::FlexBasis::auto()", spec="https://drafts.csswg.org/css-flexbox/#flex-basis-property", extra_prefixes="webkit", animation_value_type="FlexBasis", servo_restyle_damage = "reflow" )} % for (size, logical) in ALL_SIZES: <% spec = "https://drafts.csswg.org/css-box/#propdef-%s" if logical: spec = "https://drafts.csswg.org/css-logical-props/#propdef-%s" %> // NOTE: Block-size doesn't support -moz-*-content keywords, since they make // no sense on the block axis, but it simplifies things the have that it has // the same type as the other properties, since otherwise we'd need to // handle logical props where the types are different, which looks like a // pain. 
% if product == "gecko": <% parse_function = "parse" if size != "block-size" else "parse_disallow_keyword" %> // width, height, block-size, inline-size ${helpers.predefined_type( size, "MozLength", "computed::MozLength::auto()", parse_function, logical=logical, logical_group="size", allow_quirks=not logical, spec=spec % size, animation_value_type="MozLength", flags="GETCS_NEEDS_LAYOUT_FLUSH", servo_restyle_damage="reflow" )} // min-width, min-height, min-block-size, min-inline-size, ${helpers.predefined_type( "min-%s" % size, "MozLength", "computed::MozLength::auto()", parse_function, logical=logical, logical_group="min-size", allow_quirks=not logical, spec=spec % size, animation_value_type="MozLength", servo_restyle_damage = "reflow" )} ${helpers.predefined_type( "max-%s" % size, "MaxLength", "computed::MaxLength::none()", parse_function, logical=logical, logical_group="max-size", allow_quirks=not logical, spec=spec % size, animation_value_type="MaxLength", servo_restyle_damage = "reflow" )} % else: // servo versions (no keyword support) ${helpers.predefined_type( size, "LengthOrPercentageOrAuto", "computed::LengthOrPercentageOrAuto::Auto", "parse_non_negative", spec=spec % size, logical_group="size", allow_quirks=not logical, animation_value_type="ComputedValue", logical = logical, servo_restyle_damage = "reflow", )} ${helpers.predefined_type( "min-%s" % size, "LengthOrPercentage", "computed::LengthOrPercentage::Length(computed::Length::new(0.))", "parse_non_negative", spec=spec % ("min-%s" % size), logical_group="min-size", animation_value_type="ComputedValue", logical=logical, allow_quirks=not logical, servo_restyle_damage = "reflow", )} ${helpers.predefined_type( "max-%s" % size, "LengthOrPercentageOrNone", "computed::LengthOrPercentageOrNone::None", "parse_non_negative", spec=spec % ("max-%s" % size), logical_group="max-size", animation_value_type="ComputedValue", logical=logical, allow_quirks=not logical, servo_restyle_damage = "reflow", )} % endif % endfor ${helpers.single_keyword("box-sizing", "content-box border-box", extra_prefixes="moz:layout.css.prefixes.box-sizing webkit", spec="https://drafts.csswg.org/css-ui/#propdef-box-sizing", gecko_enum_prefix="StyleBoxSizing", custom_consts={ "content-box": "Content", "border-box": "Border" }, animation_value_type="discrete", servo_restyle_damage = "reflow")} ${helpers.single_keyword("object-fit", "fill contain cover none scale-down", products="gecko", animation_value_type="discrete", spec="https://drafts.csswg.org/css-images/#propdef-object-fit")} ${helpers.predefined_type("object-position", "Position", "computed::Position::zero()", products="gecko", boxed=True, spec="https://drafts.csswg.org/css-images-3/#the-object-position", animation_value_type="ComputedValue")} % for kind in ["row", "column"]: % for range in ["start", "end"]: ${helpers.predefined_type("grid-%s-%s" % (kind, range), "GridLine", "Default::default()", animation_value_type="discrete", spec="https://drafts.csswg.org/css-grid/#propdef-grid-%s-%s" % (kind, range), products="gecko", boxed=True)} % endfor // NOTE: According to the spec, this should handle multiple values of `<track-size>`, // but gecko supports only a single value ${helpers.predefined_type("grid-auto-%ss" % kind, "TrackSize", "Default::default()", animation_value_type="discrete", spec="https://drafts.csswg.org/css-grid/#propdef-grid-auto-%ss" % kind, products="gecko", boxed=True)} ${helpers.predefined_type( "grid-template-%ss" % kind, "GridTemplateComponent", 
"specified::GenericGridTemplateComponent::None", products="gecko", spec="https://drafts.csswg.org/css-grid/#propdef-grid-template-%ss" % kind, boxed=True, flags="GETCS_NEEDS_LAYOUT_FLUSH", animation_value_type="discrete" )} % endfor ${helpers.predefined_type("grid-auto-flow", "GridAutoFlow", initial_value="computed::GridAutoFlow::row()", products="gecko", animation_value_type="discrete", spec="https://drafts.csswg.org/css-grid/#propdef-grid-auto-flow")} ${helpers.predefined_type("grid-template-areas", "GridTemplateAreas", initial_value="computed::GridTemplateAreas::none()", products="gecko", animation_value_type="discrete", spec="https://drafts.csswg.org/css-grid/#propdef-grid-template-areas")} ${helpers.predefined_type("column-gap", "length::NonNegativeLengthOrPercentageOrNormal", "Either::Second(Normal)", alias="grid-column-gap" if product == "gecko" else "", extra_prefixes="moz", servo_pref="layout.columns.enabled", spec="https://drafts.csswg.org/css-align-3/#propdef-column-gap", animation_value_type="NonNegativeLengthOrPercentageOrNormal", servo_restyle_damage = "reflow")} // no need for -moz- prefixed alias for this property ${helpers.predefined_type("row-gap", "length::NonNegativeLengthOrPercentageOrNormal", "Either::Second(Normal)", alias="grid-row-gap", products="gecko", spec="https://drafts.csswg.org/css-align-3/#propdef-row-gap", animation_value_type="NonNegativeLengthOrPercentageOrNormal", servo_restyle_damage = "reflow")}<|fim▁end|>
allow_quirks=True, servo_restyle_damage="reflow_out_of_flow", logical_group="inset",
<|file_name|>categories.py<|end_file_name|><|fim▁begin|>from django.core.urlresolvers import reverse from django.utils.translation import ugettext_lazy as _ from olympia.constants.applications import ( ANDROID, FIREFOX, SEAMONKEY, THUNDERBIRD) from olympia.constants.base import ( ADDON_DICT, ADDON_EXTENSION, ADDON_LPAPP, ADDON_PERSONA, ADDON_SEARCH, ADDON_SLUGS, ADDON_STATICTHEME, ADDON_THEME) class StaticCategory(object): """Helper to populate `CATEGORIES` and provide some helpers. Note that any instance is immutable to avoid changing values on the globally unique instances during test runs which can lead to hard to debug sporadic test-failures. """ def __init__(self, id=None, app=None, type=None, misc=False, name=None, slug=None, weight=0, description=None): # Avoid triggering our own __setattr__ implementation # to keep immutability intact but set initial values. object.__setattr__(self, 'id', id) object.__setattr__(self, 'application', app) object.__setattr__(self, 'misc', misc) object.__setattr__(self, 'name', name) object.__setattr__(self, 'slug', slug) object.__setattr__(self, 'type', type) object.__setattr__(self, 'weight', weight) object.__setattr__(self, 'description', description) def __unicode__(self): return unicode(self.name) def __repr__(self): return u'<%s: %s (%s)>' % ( self.__class__.__name__, self.__unicode__(), self.application) def get_url_path(self): try: type = ADDON_SLUGS[self.type] except KeyError: type = ADDON_SLUGS[ADDON_EXTENSION] return reverse('browse.%s' % type, args=[self.slug]) def _immutable(self, *args): raise TypeError('%r instances are immutable' % self.__class__.__name__) __setattr__ = __delattr__ = _immutable del _immutable CATEGORIES = { FIREFOX.id: { ADDON_EXTENSION: { 'alerts-updates': StaticCategory( id=72, name=_(u'Alerts & Updates')), 'appearance': StaticCategory(id=14, name=_(u'Appearance')), 'bookmarks': StaticCategory(id=22, name=_(u'Bookmarks')), 'download-management': StaticCategory( id=5, name=_(u'Download Management')), 'feeds-news-blogging': StaticCategory( id=1, name=_(u'Feeds, News & Blogging')), 'games-entertainment': StaticCategory( id=142, name=_(u'Games & Entertainment')), 'language-support': StaticCategory( id=37, name=_(u'Language Support')), 'photos-music-videos': StaticCategory( id=38, name=_(u'Photos, Music & Videos')), 'privacy-security': StaticCategory( id=12, name=_(u'Privacy & Security')), 'search-tools': StaticCategory(id=13, name=_(u'Search Tools')), 'shopping': StaticCategory(id=141, name=_(u'Shopping')), 'social-communication': StaticCategory( id=71, name=_(u'Social & Communication')), 'tabs': StaticCategory(id=93, name=_(u'Tabs')), 'web-development': StaticCategory( id=4, name=_(u'Web Development')), 'other': StaticCategory(id=73, name=_(u'Other'), weight=333) }, ADDON_THEME: { 'animals': StaticCategory(id=30, name=_(u'Animals')), 'compact': StaticCategory(id=32, name=_(u'Compact')), 'large': StaticCategory(id=67, name=_(u'Large')), 'miscellaneous': StaticCategory(id=21, name=_(u'Miscellaneous')), 'modern': StaticCategory(id=62, name=_(u'Modern')), 'nature': StaticCategory(id=29, name=_(u'Nature')), 'os-integration': StaticCategory(id=61, name=_(u'OS Integration')), 'retro': StaticCategory(id=31, name=_(u'Retro')), 'sports': StaticCategory(id=26, name=_(u'Sports')) }, ADDON_STATICTHEME: { 'abstract': StaticCategory(id=300, name=_(u'Abstract')), 'causes': StaticCategory(id=320, name=_(u'Causes')), 'fashion': StaticCategory(id=324, name=_(u'Fashion')), 'film-and-tv': StaticCategory(id=326, name=_(u'Film and TV')), 
'firefox': StaticCategory(id=308, name=_(u'Firefox')), 'foxkeh': StaticCategory(id=310, name=_(u'Foxkeh')), 'holiday': StaticCategory(id=328, name=_(u'Holiday')), 'music': StaticCategory(id=322, name=_(u'Music')), 'nature': StaticCategory(id=302, name=_(u'Nature')), 'other': StaticCategory(id=314, name=_(u'Other'), weight=333), 'scenery': StaticCategory(id=306, name=_(u'Scenery')), 'seasonal': StaticCategory(id=312, name=_(u'Seasonal')), 'solid': StaticCategory(id=318, name=_(u'Solid')), 'sports': StaticCategory(id=304, name=_(u'Sports')), 'websites': StaticCategory(id=316, name=_(u'Websites')) }, ADDON_DICT: { 'general': StaticCategory(id=95, name=_(u'General')) },<|fim▁hole|> 'business': StaticCategory(id=80, name=_(u'Business')), 'dictionaries-encyclopedias': StaticCategory( id=81, name=_(u'Dictionaries & Encyclopedias')), 'general': StaticCategory(id=82, name=_(u'General')), 'kids': StaticCategory(id=83, name=_(u'Kids')), 'multiple-search': StaticCategory( id=84, name=_(u'Multiple Search')), 'music': StaticCategory(id=85, name=_(u'Music')), 'news-blogs': StaticCategory(id=86, name=_(u'News & Blogs')), 'photos-images': StaticCategory(id=87, name=_(u'Photos & Images')), 'shopping-e-commerce': StaticCategory( id=88, name=_(u'Shopping & E-Commerce')), 'social-people': StaticCategory(id=89, name=_(u'Social & People')), 'sports': StaticCategory(id=90, name=_(u'Sports')), 'travel': StaticCategory(id=91, name=_(u'Travel')), 'video': StaticCategory(id=78, name=_(u'Video')) }, ADDON_LPAPP: { 'general': StaticCategory(id=98, name=_(u'General')) }, ADDON_PERSONA: { 'abstract': StaticCategory(id=100, name=_(u'Abstract')), 'causes': StaticCategory(id=120, name=_(u'Causes')), 'fashion': StaticCategory(id=124, name=_(u'Fashion')), 'film-and-tv': StaticCategory(id=126, name=_(u'Film and TV')), 'firefox': StaticCategory(id=108, name=_(u'Firefox')), 'foxkeh': StaticCategory(id=110, name=_(u'Foxkeh')), 'holiday': StaticCategory(id=128, name=_(u'Holiday')), 'music': StaticCategory(id=122, name=_(u'Music')), 'nature': StaticCategory(id=102, name=_(u'Nature')), 'other': StaticCategory(id=114, name=_(u'Other')), 'scenery': StaticCategory(id=106, name=_(u'Scenery')), 'seasonal': StaticCategory(id=112, name=_(u'Seasonal')), 'solid': StaticCategory(id=118, name=_(u'Solid')), 'sports': StaticCategory(id=104, name=_(u'Sports')), 'websites': StaticCategory(id=116, name=_(u'Websites')) } }, ANDROID.id: { ADDON_EXTENSION: { 'device-features-location': StaticCategory( id=145, name=_(u'Device Features & Location')), 'experimental': StaticCategory(id=151, name=_(u'Experimental')), 'feeds-news-blogging': StaticCategory( id=147, name=_(u'Feeds, News, & Blogging')), 'performance': StaticCategory(id=144, name=_(u'Performance')), 'photos-media': StaticCategory(id=143, name=_(u'Photos & Media')), 'security-privacy': StaticCategory( id=149, name=_(u'Security & Privacy')), 'shopping': StaticCategory(id=150, name=_(u'Shopping')), 'social-networking': StaticCategory( id=148, name=_(u'Social Networking')), 'sports-games': StaticCategory(id=146, name=_(u'Sports & Games')), 'user-interface': StaticCategory( id=152, name=_(u'User Interface')), 'other': StaticCategory(id=153, name=_(u'Other'), weight=333) } }, THUNDERBIRD.id: { ADDON_EXTENSION: { 'appearance': StaticCategory( id=208, name=_(u'Appearance and Customization')), 'calendar': StaticCategory( id=204, name=_(u'Calendar and Date/Time')), 'chat': StaticCategory(id=210, name=_(u'Chat and IM')), 'composition': StaticCategory( id=202, name=_(u'Message Composition')), 'contacts': 
StaticCategory(id=23, name=_(u'Contacts')), 'folders-and-filters': StaticCategory( id=200, name=_(u'Folders and Filters')), 'importexport': StaticCategory(id=206, name=_(u'Import/Export')), 'language-support': StaticCategory( id=69, name=_(u'Language Support')), 'message-and-news-reading': StaticCategory( id=58, name=_(u'Message and News Reading')), 'miscellaneous': StaticCategory(id=50, name=_(u'Miscellaneous')), 'privacy-and-security': StaticCategory( id=66, name=_(u'Privacy and Security')), 'tags': StaticCategory(id=212, name=_(u'Tags')) }, ADDON_THEME: { 'compact': StaticCategory(id=64, name=_(u'Compact')), 'miscellaneous': StaticCategory(id=60, name=_(u'Miscellaneous')), 'modern': StaticCategory(id=63, name=_(u'Modern')), 'nature': StaticCategory(id=65, name=_(u'Nature')) }, ADDON_DICT: { 'general': StaticCategory(id=97, name=_(u'General')) }, ADDON_LPAPP: { 'general': StaticCategory(id=99, name=_(u'General')) } }, SEAMONKEY.id: { ADDON_EXTENSION: { 'bookmarks': StaticCategory(id=51, name=_(u'Bookmarks')), 'downloading-and-file-management': StaticCategory( id=42, name=_(u'Downloading and File Management')), 'interface-customizations': StaticCategory( id=48, name=_(u'Interface Customizations')), 'language-support-and-translation': StaticCategory( id=55, name=_(u'Language Support and Translation')), 'miscellaneous': StaticCategory( id=49, name=_(u'Miscellaneous')), 'photos-and-media': StaticCategory( id=56, name=_(u'Photos and Media')), 'privacy-and-security': StaticCategory( id=46, name=_(u'Privacy and Security')), 'rss-news-and-blogging': StaticCategory( id=39, name=_(u'RSS, News and Blogging')), 'search-tools': StaticCategory(id=47, name=_(u'Search Tools')), 'site-specific': StaticCategory(id=52, name=_(u'Site-specific')), 'web-and-developer-tools': StaticCategory( id=41, name=_(u'Web and Developer Tools')) }, ADDON_THEME: { 'miscellaneous': StaticCategory(id=59, name=_(u'Miscellaneous')) }, ADDON_DICT: { 'general': StaticCategory(id=96, name=_(u'General')) }, ADDON_LPAPP: { 'general': StaticCategory(id=130, name=_(u'General')) } }, } CATEGORIES_BY_ID = {} for app in CATEGORIES: for type_ in CATEGORIES[app]: for slug in CATEGORIES[app][type_]: cat = CATEGORIES[app][type_][slug] # Flatten some values and set them, avoiding immutability # of `StaticCategory` by calling `object.__setattr__` directly. if slug in ('miscellaneous', 'other'): object.__setattr__(cat, 'misc', True) object.__setattr__(cat, 'slug', slug) object.__setattr__(cat, 'application', app) object.__setattr__(cat, 'type', type_) CATEGORIES_BY_ID[cat.id] = cat<|fim▁end|>
ADDON_SEARCH: { 'bookmarks': StaticCategory(id=79, name=_(u'Bookmarks')),
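
StaticCategory in the sample above rejects attribute writes, and the module later fills in slug, application and type through object.__setattr__. The snippet below is a self-contained illustration of that immutability pattern; FrozenRecord and its fields are invented names, not part of olympia.

# Hedged sketch of the same "immutable unless bootstrap code bypasses
# __setattr__" pattern used by StaticCategory above. Class and fields invented.
class FrozenRecord(object):
    def __init__(self, name):
        # Bypass our own guard, exactly like StaticCategory.__init__ does.
        object.__setattr__(self, 'name', name)

    def __setattr__(self, key, value):
        raise TypeError('%r instances are immutable' % self.__class__.__name__)

rec = FrozenRecord('abstract')
try:
    rec.name = 'other'                       # ordinary writes are rejected
except TypeError:
    pass
object.__setattr__(rec, 'slug', 'abstract')  # bootstrap code can still add fields
assert (rec.name, rec.slug) == ('abstract', 'abstract')
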
<|file_name|>sql_translator.py<|end_file_name|><|fim▁begin|>from rapt.treebrd.attributes import AttributeList from ...treebrd.node import Operator from ..base_translator import BaseTranslator class SQLQuery: """ Structure defining the building blocks of a SQL query. """ def __init__(self, select_block, from_block, where_block=''): self.prefix = '' self.select_block = select_block self.from_block = from_block self.where_block = where_block @property def _basic_query(self): if self.select_block: return '{prefix}' \ 'SELECT {select} FROM {relation}' else: return '{prefix}{relation}' @property def _sql_query_skeleton(self): sql = self._basic_query if self.where_block: sql += ' WHERE {conditions}' return sql def to_sql(self): """ Construct a SQL query based on the stored blocks. :return: a SQL query """ return self._sql_query_skeleton.format( prefix=self.prefix, select=self.select_block, relation=self.from_block, conditions=self.where_block) class SQLSetQuery(SQLQuery): """ Structure defining the building blocks of a SQL query with set semantics. """ @property def _basic_query(self): return '{prefix}' \ 'SELECT DISTINCT {select} FROM {relation}' class Translator(BaseTranslator): """ A Translator defining the operations for translating a relational algebra statement into a SQL statement using bag semantics. """ query = SQLQuery<|fim▁hole|> @classmethod def _get_temp_name(cls, node): return node.name or '_{}'.format(id(node)) @classmethod def _get_sql_operator(cls, node): operators = { Operator.union: 'UNION', Operator.difference: 'EXCEPT', Operator.intersect: 'INTERSECT', Operator.cross_join: 'CROSS JOIN', Operator.theta_join: 'JOIN', Operator.natural_join: 'NATURAL JOIN', } return operators[node.operator] def relation(self, node): """ Translate a relation node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ return self.query(select_block=str(node.attributes), from_block=node.name) def select(self, node): """ Translate a select node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ child_object = self.translate(node.child) where_block = node.conditions if child_object.where_block: where_block = '({0}) AND ({1})'\ .format(child_object.where_block, node.conditions) child_object.where_block = where_block if not child_object.select_block: child_object.select_block = str(node.attributes) return child_object def project(self, node): """ Translate a project node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ child_object = self.translate(node.child) child_object.select_block = str(node.attributes) return child_object def rename(self, node): """ Translate a rename node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ child_object = self.translate(node.child) from_block = '({child}) AS {name}({attributes})'.format( child=child_object.to_sql(), name=node.name, attributes=', '.join(node.attributes.names)) return self.query(str(node.attributes), from_block=from_block) def assign(self, node): """ Translate an assign node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ child_object = self.translate(node.child) child_object.prefix = 'CREATE TEMPORARY TABLE {name}({attributes}) AS '\ .format(name=node.name, attributes=', '.join(node.attributes.names)) return child_object def natural_join(self, node): """ Translate an assign node into SQLQuery. 
:param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ return self._join(node) def theta_join(self, node): """ Translate an assign node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ return self._join(node) def cross_join(self, node): """ Translate a cross join node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ return self._join(node) def union(self, node): """ Translate a union node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ return self._set_op(node) def intersect(self, node): """ Translate an intersection node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ return self._set_op(node) def difference(self, node): """ Translate an difference node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ return self._set_op(node) def _join_helper(self, node): sobject = self.translate(node) if node.operator in { Operator.cross_join, Operator.natural_join, Operator.theta_join }: return sobject.from_block else: return '({subquery}) AS {name}'.format( subquery=sobject.to_sql(), name=self._get_temp_name(node)) def _join(self, node): """ Translate a join node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ select_block = str(node.attributes) from_block = '{left} {operator} {right}'.format( left=self._join_helper(node.left), right=self._join_helper(node.right), operator=self._get_sql_operator(node)) if node.operator == Operator.theta_join: from_block = '{from_block} ON {conditions}'.format( from_block=from_block, conditions=node.conditions) return self.query(select_block, from_block, '') def _set_op(self, node): """ Translate a set operator node into SQLQuery. :param node: a treebrd node :return: a SQLQuery object for the tree rooted at node """ select_block = str(node.attributes) from_block = '({left} {operator} ALL {right}) AS {name}'.format( left=self.translate(node.left).to_sql(), right=self.translate(node.right).to_sql(), operator=self._get_sql_operator(node), name=self._get_temp_name(node)) return self.query(select_block=select_block, from_block=from_block) class SetTranslator(Translator): """ A Translator defining the operations for translating a relational algebra statement into a SQL statement using set semantics. """ query = SQLSetQuery def _set_op(self, node): """ Translate a set operator node into SQLQuery, using set semantics. :param node: a treebrd node :return: a SQLSetQuery object for the tree rooted at node """ select_block = str(node.attributes) from_block = '({left} {operator} {right}) AS {name}'.format( left=self.translate(node.left).to_sql(), right=self.translate(node.right).to_sql(), operator=self._get_sql_operator(node), name=self._get_temp_name(node)) return self.query(select_block=select_block, from_block=from_block) def translate(root_list, use_bag_semantics=False): """ Translate a list of relational algebra trees into SQL statements. :param root_list: a list of tree roots :param use_bag_semantics: flag for using relational algebra bag semantics :return: a list of SQL statements """ translator = (Translator() if use_bag_semantics else SetTranslator()) return [translator.translate(root).to_sql() for root in root_list]<|fim▁end|>
<|file_name|>onegbe.py<|end_file_name|><|fim▁begin|>import logging import struct from memory import Memory from network import Mac, IpAddress from gbe import Gbe LOGGER = logging.getLogger(__name__) # Offsets for fields in the memory map, in bytes OFFSET_CORE_TYPE = 0x0 OFFSET_BUFFER_SIZE = 0x4 OFFSET_WORD_LEN = 0x8 OFFSET_MAC_ADDR = 0xc OFFSET_IP_ADDR = 0x14 OFFSET_GW_ADDR = 0x18 OFFSET_NETMASK = 0x1c OFFSET_MC_IP = 0x20 OFFSET_MC_MASK = 0x24 OFFSET_BUF_VLD = 0x28 OFFSET_FLAGS = 0x2c OFFSET_PORT = 0x30 OFFSET_STATUS = 0x34 OFFSET_CONTROL = 0x40 OFFSET_ARP_SIZE = 0x44 OFFSET_TX_PKT_RATE = 0x48 OFFSET_TX_PKT_CNT = 0x4c OFFSET_TX_VLD_RATE = 0x50 OFFSET_TX_VLD_CNT = 0x54 OFFSET_TX_OF_CNT = 0x58 OFFSET_TX_AF_CNT = 0x5c OFFSET_RX_PKT_RATE = 0x60 OFFSET_RX_PKT_CNT = 0x64 OFFSET_RX_VLD_RATE = 0x68 OFFSET_RX_VLD_CNT = 0x6c OFFSET_RX_OF_CNT = 0x70 OFFSET_RX_AF_CNT = 0x74 OFFSET_COUNT_RST = 0x78 OFFSET_ARP_CACHE = 0x1000 OFFSET_TX_BUFFER = 0x4000 OFFSET_RX_BUFFER = 0x8000 # Sizes for fields in the memory map, in bytes SIZE_CORE_TYPE = 0x4 SIZE_BUFFER_SIZE = 0x4 SIZE_WORD_LEN = 0x4 SIZE_MAC_ADDR = 0x8 SIZE_IP_ADDR = 0x4 SIZE_GW_ADDR = 0x4 SIZE_NETMASK = 0x4 SIZE_MC_IP = 0x4 SIZE_MC_MASK = 0x4 SIZE_BUF_AVAIL = 0x4 SIZE_FLAGS = 0x4 SIZE_PORT = 0x4 SIZE_STATUS = 0x8 SIZE_CONTROL = 0x8 SIZE_ARP_SIZE = 0x4 SIZE_TX_PKT_RATE = 0x4 SIZE_TX_PKT_CNT = 0x4 SIZE_TX_VLD_RATE = 0x4 SIZE_TX_VLD_CNT = 0x4 SIZE_TX_OF_CNT = 0x4 SIZE_TX_AF_CNT = 0x4 SIZE_RX_PKT_RATE = 0x4 SIZE_RX_PKT_CNT = 0x4 SIZE_RX_VLD_RATE = 0x4 SIZE_RX_VLD_CNT = 0x4 SIZE_RX_OF_CNT = 0x4 SIZE_RX_AF_CNT = 0x4 SIZE_COUNT_RST = 0x4 SIZE_ARP_CACHE = 0x3000 SIZE_TX_BUFFER = 0x4000 SIZE_RX_BUFFER = 0x4000 class OneGbe(Memory, Gbe): """ To do with the CASPER ten GBE yellow block implemented on FPGAs, and interfaced-to via KATCP memory reads/writes. """ def __init__(self, parent, name, address, length_bytes, device_info=None): """ :param parent: Parent object who owns this TenGbe instance :param name: Unique name of the instance :param address: :param length_bytes: :param device_info: Information about this device """ Memory.__init__(self, name, 32, address, length_bytes) Gbe.__init__(self, parent, name, address, length_bytes, device_info) self.memmap_compliant = self._check_memmap_compliance() @property def mac(self): return self.get_gbe_core_details()['mac'] @property def ip_address(self): return self.get_gbe_core_details()['ip'] @property def port(self): return self.get_gbe_core_details()['fabric_port'] def _check_memmap_compliance(self): """ Look at the first word of the core's memory map and try to figure out if it compliant with the harmonized ethernet map. This isn't flawless, but unless the user sets a very weird MAC address for their core (which is what the old core's map stored in register 0, it should be OK). """ x = self.parent.read(self.name, 4) cpu_tx_en, cpu_rx_en, rev, core_type = struct.unpack('4B', x) if (cpu_tx_en > 1) or (cpu_rx_en > 1) or (core_type != 2): return False else: return True def post_create_update(self, raw_device_info): """ Update the device with information not available at creation. 
:param raw_device_info: info about this block that may be useful """ super(TenGbe, self).post_create_update(raw_device_info) self.snaps = {'tx': None, 'rx': None} for snapshot in self.parent.snapshots: if snapshot.name.find(self.name + '_') == 0: name = snapshot.name.replace(self.name + '_', '') if name == 'txs_ss': self.snaps['tx'] = snapshot.name elif name == 'rxs_ss': self.snaps['rx'] = snapshot.name else: errmsg = '%s: incorrect snap %s under tengbe ' \ 'block' % (self.fullname, snapshot.name) LOGGER.error(errmsg) raise RuntimeError(errmsg) def read_txsnap(self): """ Read the TX snapshot embedded in this TenGBE yellow block """ return self.snaps['tx'].read(timeout=10)['data'] def read_rxsnap(self): """ Read the RX snapshot embedded in this TenGBE yellow block """ return self.snaps['rx'].read(timeout=10)['data'] # def fabric_start(self): # """ # Setup the interface by writing to the fabric directly, bypassing tap. # :param self: # :return: # """ # if self.tap_running(): # log_runtime_error( # LOGGER, 'TAP running on %s, stop tap before ' # 'accessing fabric directly.' % self.name) # mac_location = 0x00 # ip_location = 0x10 # port_location = 0x22 # self.parent.write(self.name, self.mac.packed(), mac_location) # self.parent.write(self.name, self.ip_address.packed(), ip_location) # # self.parent.write_int(self.name, self.port, offset = port_location) def dhcp_start(self): """ Configure this interface, then start a DHCP client on ALL interfaces. """ #if self.mac is None: # TODO get MAC from EEPROM serial number and assign here # self.mac = '0' reply, _ = self.parent.transport.katcprequest( name='tap-start', request_timeout=5, require_ok=True, request_args=(self.name, self.name, '0.0.0.0', str(self.port), str(self.mac), )) if reply.arguments[0] != 'ok': raise RuntimeError('%s: failure starting tap driver.' % self.name) reply, _ = self.parent.transport.katcprequest( name='tap-arp-config', request_timeout=1, require_ok=True, request_args=(self.name, 'mode', '0')) if reply.arguments[0] != 'ok': raise RuntimeError('%s: failure disabling ARP.' % self.name) reply, _ = self.parent.transport.katcprequest( name='tap-dhcp', request_timeout=30, require_ok=True, request_args=(self.name, )) if reply.arguments[0] != 'ok': raise RuntimeError('%s: failure starting DHCP client.' % self.name) reply, _ = self.parent.transport.katcprequest( name='tap-arp-config', request_timeout=1, require_ok=True, request_args=(self.name, 'mode', '-1')) if reply.arguments[0] != 'ok': raise RuntimeError('%s: failure re-enabling ARP.' % self.name) # it looks like the command completed without error, so # update the basic core details self.get_gbe_core_details() def tap_start(self, restart=False): """ Program a 10GbE device and start the TAP driver. :param restart: stop before starting """ if len(self.name) > 8: raise NameError('%s: tap device identifier must be shorter than 9 ' 'characters..' % self.fullname) if restart: self.tap_stop() if self.tap_running(): LOGGER.info('%s: tap already running.' % self.fullname) return LOGGER.info('%s: starting tap driver.' % self.fullname) reply, _ = self.parent.transport.katcprequest( name='tap-start', request_timeout=-1, require_ok=True, request_args=(self.name, self.name, str(self.ip_address), str(self.port), str(self.mac), )) if reply.arguments[0] != 'ok': raise RuntimeError('%s: failure starting tap driver.' % self.fullname) def tap_stop(self): """ Stop a TAP driver. """ if not self.tap_running(): return LOGGER.info('%s: stopping tap driver.' 
% self.fullname) reply, _ = self.parent.transport.katcprequest( name='tap-stop', request_timeout=-1, require_ok=True, request_args=(self.name, )) if reply.arguments[0] != 'ok': raise RuntimeError('%s: failure stopping tap ' 'device.' % self.fullname) def tap_info(self): """ Get info on the tap instance running on this interface. """ uninforms = [] def handle_inform(msg): uninforms.append(msg) self.parent.unhandled_inform_handler = handle_inform _, informs = self.parent.transport.katcprequest( name='tap-info', request_timeout=-1, require_ok=False, request_args=(self.name, )) self.parent.unhandled_inform_handler = None # process the tap-info if len(informs) == 1: return {'name': informs[0].arguments[0], 'ip': informs[0].arguments[1]} elif len(informs) == 0: return {'name': '', 'ip': ''} else: raise RuntimeError('%s: invalid return from tap-info?' % self.fullname) # TODO - this request should return okay if the tap isn't # running - it shouldn't fail # if reply.arguments[0] != 'ok': # log_runtime_error(LOGGER, 'Failure getting tap info for ' # 'device %s." % str(self)) def tap_running(self): """ Determine if an instance if tap is already running on for this ten GBE interface. """ tapinfo = self.tap_info() if tapinfo['name'] == '': return False return True def tap_arp_reload(self): """ Have the tap driver reload its ARP table right now. """ reply, _ = self.parent.transport.katcprequest( name="tap-arp-reload", request_timeout=-1, require_ok=True, request_args=(self.name, )) if reply.arguments[0] != 'ok': raise RuntimeError('Failure requesting ARP reload for tap ' 'device %s.' % str(self)) def multicast_receive(self, ip_str, group_size): """ Send a request to KATCP to have this tap instance send a multicast group join request. :param ip_str: A dotted decimal string representation of the base mcast IP address. :param group_size: An integer for how many mcast addresses from base to respond to. """ # mask = 255*(2 ** 24) + 255*(2 ** 16) + 255*(2 ** 8) + (255-group_size) # self.parent.write_int(self.name, str2ip(ip_str), offset=12) # self.parent.write_int(self.name, mask, offset=13) # mcast_group_string = ip_str + '+' + str(group_size) mcast_group_string = ip_str reply, _ = self.parent.transport.katcprequest( 'tap-multicast-add', -1, True, request_args=(self.name, 'recv', mcast_group_string, )) if reply.arguments[0] == 'ok': if mcast_group_string not in self.multicast_subscriptions: self.multicast_subscriptions.append(mcast_group_string) return else: raise RuntimeError('%s: failed adding multicast receive %s to ' 'tap device.' % (self.fullname, mcast_group_string)) def multicast_remove(self, ip_str): """ Send a request to be removed from a multicast group. :param ip_str: A dotted decimal string representation of the base mcast IP address. """ try: reply, _ = self.parent.transport.katcprequest( 'tap-multicast-remove', -1, True, request_args=(self.name, IpAddress.str2ip(ip_str), )) except: raise RuntimeError('%s: tap-multicast-remove does not seem to ' 'be supported on %s' % (self.fullname, self.parent.host)) if reply.arguments[0] == 'ok': if ip_str not in self.multicast_subscriptions: LOGGER.warning( '%s: That is odd, %s removed from mcast subscriptions, but ' 'it was not in its list of sbscribed addresses.' 
% ( self.fullname, ip_str)) self.multicast_subscriptions.remove(ip_str) return else: raise RuntimeError('%s: failed removing multicast address %s ' 'from tap device' % (self.fullname, IpAddress.str2ip(ip_str))) def _fabric_enable_disable(self, target_val): """ :param target_val: """ if self.memmap_compliant: word_bytes = list( struct.unpack('>4B', self.parent.read(self.name, 4, OFFSET_FLAGS))) if word_bytes[0] == target_val: return word_bytes[0] = target_val word_packed = struct.pack('>4B', *word_bytes) self.parent.write(self.name, word_packed, OFFSET_FLAGS) else: # 0x20 or (0x20 / 4)? What was the /4 for? word_bytes = list( struct.unpack('>4B', self.parent.read(self.name, 4, 0x20))) if word_bytes[1] == target_val: return word_bytes[1] = target_val word_packed = struct.pack('>4B', *word_bytes) self.parent.write(self.name, word_packed, 0x20) def fabric_enable(self): """ Enable the core fabric """ self._fabric_enable_disable(1) def fabric_disable(self): """ Enable the core fabric """ self._fabric_enable_disable(0) def fabric_soft_reset_toggle(self): """ Toggle the fabric soft reset """ if self.memmap_compliant: word_bytes = struct.unpack('>4B', self.parent.read(self.name, 4, OFFSET_FLAGS)) word_bytes = list(word_bytes) def write_val(val): word_bytes[2] = val word_packed = struct.pack('>4B', *word_bytes) if val == 0: self.parent.write(self.name, word_packed, OFFSET_FLAGS) else: self.parent.blindwrite(self.name, word_packed, OFFSET_FLAGS) if word_bytes[2] == 1: write_val(0) write_val(1) write_val(0) else: word_bytes = struct.unpack('>4B', self.parent.read(self.name, 4, 0x20)) word_bytes = list(word_bytes) def write_val(val): word_bytes[0] = val word_packed = struct.pack('>4B', *word_bytes) if val == 0: self.parent.write(self.name, word_packed, 0x20) else: self.parent.blindwrite(self.name, word_packed, 0x20) if word_bytes[0] == 1: write_val(0) write_val(1) write_val(0) def get_gbe_core_details(self, read_arp=False, read_cpu=False): """ Get 10GbE core details. assemble struct for header stuff... .. 
code-block:: python \"\"\" 0x00 - 0x07: MAC address 0x08 - 0x0b: Not used 0x0c - 0x0f: Gateway addr 0x10 - 0x13: IP addr 0x14 - 0x17: Not assigned 0x18 - 0x1b: Buffer sizes 0x1c - 0x1f: Not assigned 0x20 : Soft reset (bit 0) 0x21 : Fabric enable (bit 0) 0x22 - 0x23: Fabric port 0x24 - 0x27: XAUI status (bit 2,3,4,5 = lane sync, bit6 = chan_bond) 0x28 - 0x2b: PHY config 0x28 : RX_eq_mix 0x29 : RX_eq_pol 0x2a : TX_preemph 0x2b : TX_diff_ctrl 0x30 - 0x33: Multicast IP RX base address 0x34 - 0x37: Multicast IP mask 0x38 - 0x3b: Subnet mask 0x1000 : CPU TX buffer 0x2000 : CPU RX buffer 0x3000 : ARP tables start word_width = 8 \"\"\" self.add_field(Bitfield.Field('mac0', 0, word_width, 0, 0 * word_width)) self.add_field(Bitfield.Field('mac1', 0, word_width, 0, 1 * word_width)) self.add_field(Bitfield.Field('mac2', 0, word_width, 0, 2 * word_width)) self.add_field(Bitfield.Field('mac3', 0, word_width, 0, 3 * word_width)) self.add_field(Bitfield.Field('mac4', 0, word_width, 0, 4 * word_width)) self.add_field(Bitfield.Field('mac5', 0, word_width, 0, 5 * word_width)) self.add_field(Bitfield.Field('mac6', 0, word_width, 0, 6 * word_width)) self.add_field(Bitfield.Field('mac7', 0, word_width, 0, 7 * word_width)) self.add_field(Bitfield.Field('unused_1', 0, (0x0c - 0x08) * word_width, 0, 8 * word_width)) self.add_field(Bitfield.Field('gateway_ip0', 0, word_width, 0, 0x0c * word_width)) self.add_field(Bitfield.Field('gateway_ip1', 0, word_width, 0, 0x0d * word_width)) self.add_field(Bitfield.Field('gateway_ip2', 0, word_width, 0, 0x0e * word_width)) self.add_field(Bitfield.Field('gateway_ip3', 0, word_width, 0, 0x0f * word_width)) self.add_field(Bitfield.Field('ip0', 0, word_width, 0, 0x10 * word_width)) self.add_field(Bitfield.Field('ip1', 0, word_width, 0, 0x11 * word_width)) self.add_field(Bitfield.Field('ip2', 0, word_width, 0, 0x12 * word_width)) self.add_field(Bitfield.Field('ip3', 0, word_width, 0, 0x13 * word_width)) self.add_field(Bitfield.Field('unused_2', 0, (0x18 - 0x14) * word_width, 0, 0x14 * word_width)) self.add_field(Bitfield.Field('buf_sizes', 0, (0x1c - 0x18) * word_width, 0, 0x18 * word_width)) self.add_field(Bitfield.Field('unused_3', 0, (0x20 - 0x1c) * word_width, 0, 0x1c * word_width)) self.add_field(Bitfield.Field('soft_reset', 2, 1, 0, 0x20 * word_width)) self.add_field(Bitfield.Field('fabric_enable', 2, 1, 0, 0x21 * word_width)) self.add_field(Bitfield.Field('port', 0, (0x24 - 0x22) * word_width, 0, 0x22 * word_width)) self.add_field(Bitfield.Field('xaui_status', 0, (0x28 - 0x24) * word_width, 0, 0x24 * word_width)) self.add_field(Bitfield.Field('rx_eq_mix', 0, word_width, 0, 0x28 * word_width)) self.add_field(Bitfield.Field('rq_eq_pol', 0, word_width, 0, 0x29 * word_width)) self.add_field(Bitfield.Field('tx_preempth', 0, word_width, 0, 0x2a * word_width)) self.add_field(Bitfield.Field('tx_diff_ctrl', 0, word_width, 0, 0x2b * word_width)) #self.add_field(Bitfield.Field('buffer_tx', 0, 0x1000 * word_width, 0, 0x1000 * word_width)) #self.add_field(Bitfield.Field('buffer_rx', 0, 0x1000 * word_width, 0, 0x2000 * word_width)) #self.add_field(Bitfield.Field('arp_table', 0, 0x1000 * word_width, 0, 0x3000 * word_width)) """ if self.memmap_compliant: data = self.parent.read(self.name, 16384) data = list(struct.unpack('>16384B', data)) returnval = { 'ip_prefix': '%i.%i.%i.' 
% (data[0x14], data[0x15], data[0x16]), 'ip': IpAddress('%i.%i.%i.%i' % (data[0x14], data[0x15], data[0x16], data[0x17])), 'subnet_mask': IpAddress('%i.%i.%i.%i' % ( data[0x1c], data[0x1d], data[0x1e], data[0x1f])), 'mac': Mac('%i:%i:%i:%i:%i:%i' % (data[0x0e], data[0x0f], data[0x10], data[0x11], data[0x12], data[0x13])), 'gateway_ip': IpAddress('%i.%i.%i.%i' % (data[0x18], data[0x19], data[0x1a], data[0x1b])), 'fabric_port': ((data[0x32] << 8) + (data[0x33])), 'fabric_en': bool(data[0x2f] & 1), 'multicast': {'base_ip': IpAddress('%i.%i.%i.%i' % ( data[0x20], data[0x21], data[0x22], data[0x23])), 'ip_mask': IpAddress('%i.%i.%i.%i' % ( data[0x24], data[0x25], data[0x26], data[0x27])), 'rx_ips': []} } else: data = self.parent.read(self.name, 16384) data = list(struct.unpack('>16384B', data)) returnval = { 'ip_prefix': '%i.%i.%i.' % (data[0x10], data[0x11], data[0x12]), 'ip': IpAddress('%i.%i.%i.%i' % (data[0x10], data[0x11], data[0x12], data[0x13])), 'subnet_mask': IpAddress('%i.%i.%i.%i' % ( data[0x38], data[0x39], data[0x3a], data[0x3b])), 'mac': Mac('%i:%i:%i:%i:%i:%i' % (data[0x02], data[0x03], data[0x04], data[0x05], data[0x06], data[0x07])), 'gateway_ip': IpAddress('%i.%i.%i.%i' % (data[0x0c], data[0x0d], data[0x0e], data[0x0f])), 'fabric_port': ((data[0x22] << 8) + (data[0x23])), 'fabric_en': bool(data[0x21] & 1), 'xaui_lane_sync': [bool(data[0x27] & 4), bool(data[0x27] & 8), bool(data[0x27] & 16), bool(data[0x27] & 32)], 'xaui_status': [data[0x24], data[0x25], data[0x26], data[0x27]], 'xaui_chan_bond': bool(data[0x27] & 64), 'xaui_phy': {'rx_eq_mix': data[0x28], 'rx_eq_pol': data[0x29], 'tx_preemph': data[0x2a], 'tx_swing': data[0x2b]}, 'multicast': {'base_ip': IpAddress('%i.%i.%i.%i' % ( data[0x30], data[0x31], data[0x32], data[0x33])), 'ip_mask': IpAddress('%i.%i.%i.%i' % ( data[0x34], data[0x35], data[0x36], data[0x37])), 'rx_ips': []} } possible_addresses = [int(returnval['multicast']['base_ip'])] mask_int = int(returnval['multicast']['ip_mask']) for ctr in range(32): mask_bit = (mask_int >> ctr) & 1 if not mask_bit: new_ips = [] for ip in possible_addresses: new_ips.append(ip & (~(1 << ctr))) new_ips.append(new_ips[-1] | (1 << ctr)) possible_addresses.extend(new_ips) tmp = list(set(possible_addresses)) for ip in tmp: returnval['multicast']['rx_ips'].append(IpAddress(ip)) if read_arp: returnval['arp'] = self.get_arp_details(data) if read_cpu: returnval.update(self.get_cpu_details(data)) self.core_details = returnval return returnval def get_arp_details(self, port_dump=None): """ Get ARP details from this interface. :param port_dump: A list of raw bytes from interface memory. :type port_dump: list """ if self.memmap_compliant: arp_addr = OFFSET_ARP_CACHE else: arp_addr = 0x3000 if port_dump is None: port_dump = self.parent.read(self.name, 16384) port_dump = list(struct.unpack('>16384B', port_dump)) returnval = [] for addr in range(256): mac = [] for ctr in range(2, 8): mac.append(port_dump[arp_addr + (addr * 8) + ctr]) returnval.append(mac) return returnval def get_cpu_details(self, port_dump=None): """ Read details of the CPU buffers. 
:param port_dump: """ #TODO Not memmap compliant if port_dump is None: port_dump = self.parent.read(self.name, 16384) port_dump = list(struct.unpack('>16384B', port_dump)) returnval = {'cpu_tx': {}} for ctr in range(4096 / 8): tmp = [] for ctr2 in range(8): tmp.append(port_dump[4096 + (8 * ctr) + ctr2]) returnval['cpu_tx'][ctr*8] = tmp returnval['cpu_rx_buf_unack_data'] = port_dump[6 * 4 + 3] returnval['cpu_rx'] = {} for ctr in range(port_dump[6 * 4 + 3] + 8): tmp = [] for ctr2 in range(8): tmp.append(port_dump[8192 + (8 * ctr) + ctr2]) returnval['cpu_rx'][ctr * 8] = tmp return returnval def set_arp_table(self, macs): """Set the ARP table with a list of MAC addresses. The list, `macs`, is passed such that the zeroth element is the MAC address of the device with IP XXX.XXX.XXX.0, and element N is the MAC address of the device with IP XXX.XXX.XXX.N""" if self.memmap_compliant: arp_addr = OFFSET_ARP_CACHE else: arp_addr = 0x3000 macs = list(macs) macs_pack = struct.pack('>%dQ' % (len(macs)), *macs) self.parent.write(self.name, macs_pack, offset=arp_addr)<|fim▁hole|><|fim▁end|>
# end
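
The OFFSET_*/SIZE_* constants in the sample above describe a fixed memory map that get_gbe_core_details decodes from one large read. The snippet below is a rough, standalone illustration of that decode-by-offset idea; read_fabric_port and the fake_core buffer are invented for the example and are not part of the casperfpga API.

# Hedged sketch only: decode one field of a memory-map style dump by offset,
# in the spirit of the OFFSET_PORT / SIZE_PORT constants above.
import struct

OFFSET_PORT = 0x30  # same numeric values as the module above
SIZE_PORT = 0x4

def read_fabric_port(raw):
    """Unpack the 4-byte PORT word and keep its low 16 bits, matching the
    (data[0x32] << 8) + data[0x33] expression in the code above."""
    (word,) = struct.unpack('>I', raw[OFFSET_PORT:OFFSET_PORT + SIZE_PORT])
    return word & 0xFFFF

fake_core = bytearray(0x40)                               # invented test data
fake_core[OFFSET_PORT:OFFSET_PORT + SIZE_PORT] = struct.pack('>I', 7148)
assert read_fabric_port(bytes(fake_core)) == 7148
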
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals import os from mopidy import config, exceptions, ext __version__ = '0.2.2' class GMusicExtension(ext.Extension): dist_name = 'Mopidy-GMusic'<|fim▁hole|> conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(GMusicExtension, self).get_config_schema() schema['username'] = config.String() schema['password'] = config.Secret() schema['deviceid'] = config.String(optional=True) return schema def validate_environment(self): try: import gmusicapi # noqa except ImportError as e: raise exceptions.ExtensionError('gmusicapi library not found', e) pass def get_backend_classes(self): from .actor import GMusicBackend return [GMusicBackend]<|fim▁end|>
ext_name = 'gmusic' version = __version__ def get_default_config(self):
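
validate_environment in the sample above turns a missing gmusicapi import into an ExtensionError. Below is a generic, standalone sketch of that dependency-check pattern; check_dependency, the RuntimeError, and the deliberately fake module name are illustrative choices, not Mopidy API.

# Hedged sketch of the "probe the import, re-raise something clearer" pattern
# used by validate_environment above. Names and error type are invented.
def check_dependency(module_name):
    try:
        __import__(module_name)
    except ImportError as exc:
        raise RuntimeError('%s library not found (%s)' % (module_name, exc))

check_dependency('json')                        # stdlib module, passes silently
try:
    check_dependency('definitely_not_installed_module')
except RuntimeError as exc:
    print(exc)                                  # clearer than a bare ImportError
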
<|file_name|>no_libc.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. pub use cty_base::types::{c_int}; #[no_mangle] pub unsafe extern fn memchr(mut s: *const u8, c: c_int, n: usize) -> *const u8 { let end = s.add(n); let c = c as u8; while s != end { if *s == c { return s; } s = s.add(1); } 0 as *const u8 } #[no_mangle] pub unsafe extern fn memrchr(mut s: *const u8, c: c_int, n: usize) -> *const u8 { s = s.sub(1); let mut end = s.add(n); let c = c as u8; while s != end { if *end == c { return end; } end = end.sub(1); } 0 as *const u8 } #[no_mangle] pub unsafe extern fn memcmp(mut s1: *const u8, mut s2: *const u8, n: usize) -> c_int { let end = s1.add(n); while s1 != end { if *s1 == *s2 { s1 = s1.add(1); s2 = s2.add(1); continue; } if *s1 < *s2 { return -1; } return 1; } 0 } #[no_mangle] pub unsafe extern fn memcpy(mut dst: *mut u8, mut src: *const u8, n: usize) -> *const u8 { dst = dst.sub(1); src = src.sub(1); let mut dst_end = dst.add(n); let mut src_end = src.add(n); while dst != dst_end { *dst_end = *src_end; dst_end = dst_end.sub(1); src_end = src_end.sub(1); } dst.add(1) } #[no_mangle] pub unsafe extern fn memmove(mut dst: *mut u8, mut src: *const u8, n: usize) -> *const u8 { if src as usize <= dst as usize {<|fim▁hole|> let dst_end = dst.add(n); while dst != dst_end { *dst = *src; dst = dst.add(1); src = src.add(1); } dst.sub(n) } #[no_mangle] pub unsafe extern fn strlen(mut s: *const u8) -> usize { let mut num = 0; while *s != 0 { num += 1; s = s.add(1); } num } #[no_mangle] pub unsafe extern fn memset(mut s: *mut u8, c: c_int, n: usize) -> *mut u8 { let end = s.add(n); let c = c as u8; while s != end { *s = c; s = s.add(1); } end.sub(n) } #[cfg(target_arch = "arm")] pub mod arch { use cty_base::types::{c_int}; macro_rules! memclr { ($name:ident) => { #[no_mangle] pub unsafe extern fn $name(dst: *mut u8, n: usize) { super::memset(dst, 0, n); } } } memclr!(__aeabi_memclr); memclr!(__aeabi_memclr4); memclr!(__aeabi_memclr8); macro_rules! memcpy { ($name:ident) => { #[no_mangle] pub unsafe extern fn $name(dst: *mut u8, src: *const u8, n: usize) { super::memcpy(dst, src, n); } } } memcpy!(__aeabi_memcpy); memcpy!(__aeabi_memcpy4); memcpy!(__aeabi_memcpy8); macro_rules! memmove { ($name:ident) => { #[no_mangle] pub unsafe extern fn $name(dst: *mut u8, src: *const u8, n: usize) { super::memmove(dst, src, n); } } } memmove!(__aeabi_memmove); memmove!(__aeabi_memmove4); memmove!(__aeabi_memmove8); macro_rules! memset { ($name:ident) => { #[no_mangle] pub unsafe extern fn $name(dst: *mut u8, n: usize, val: c_int) { super::memset(dst, val, n); } } } memset!(__aeabi_memset); memset!(__aeabi_memset4); memset!(__aeabi_memset8); #[no_mangle] pub fn __aeabi_unwind_cpp_pr1() { } #[no_mangle] pub fn __aeabi_unwind_cpp_pr0() { } }<|fim▁end|>
// memcpy copies from the tail return memcpy(dst, src, n); }
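
The memchr/memrchr loops in the Rust sample above return a pointer to the first or last matching byte within the first n bytes, or a null pointer. The Python 3 model below restates that contract with indices and None so the expected results are easy to check; the function names are invented for the example.

# Hedged reference model, not the Rust code above: indices stand in for
# pointers and None stands in for the null return.
def memchr_index(buf, byte, n):
    """Index of the first occurrence of `byte` in buf[:n], else None."""
    idx = buf.find(bytes([byte]), 0, n)
    return None if idx == -1 else idx

def memrchr_index(buf, byte, n):
    """Index of the last occurrence of `byte` in buf[:n], else None."""
    idx = buf.rfind(bytes([byte]), 0, n)
    return None if idx == -1 else idx

data = b'abcabc'
assert memchr_index(data, ord('b'), len(data)) == 1
assert memrchr_index(data, ord('b'), len(data)) == 4
assert memchr_index(data, ord('z'), len(data)) is None
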
<|file_name|>base_counters.py<|end_file_name|><|fim▁begin|>import re import time class BaseCounters:<|fim▁hole|> def ping(self, key): self.validate_key(key) self.do_ping(key, int(time.time())) def hit(self, key, n=1): self.validate_key(key) self.do_hit(key, n) def validate_key(self, key): if re.match(self.keyre, key): pass else: raise ValueError("Counters keys must only contain letters, numbers, the underscore (_) and fullstop (.), received \"%s\"" % key)<|fim▁end|>
def __init__(self): self.keyre = re.compile('\A[\w.]+\Z')
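
BaseCounters in the sample above validates keys and timestamps in ping/hit and then delegates to do_ping/do_hit, which are not defined on the class itself and look intended for subclasses. The snippet below is an invented in-memory stand-in showing what such hooks might look like; InMemoryCounters and its storage layout are illustrative only.

# Hedged sketch: an invented backend implementing the do_ping/do_hit hooks
# that BaseCounters.ping()/hit() above delegate to.
import time

class InMemoryCounters(object):
    def __init__(self):
        self.pings = {}  # key -> last ping timestamp (seconds since epoch)
        self.hits = {}   # key -> accumulated hit count

    def do_ping(self, key, timestamp):
        self.pings[key] = timestamp

    def do_hit(self, key, n):
        self.hits[key] = self.hits.get(key, 0) + n

counters = InMemoryCounters()
counters.do_ping('web.requests', int(time.time()))
counters.do_hit('web.requests', 3)
counters.do_hit('web.requests', 1)
assert counters.hits['web.requests'] == 4
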
<|file_name|>panorama_js_panlet_icon_widgets_settings.js<|end_file_name|><|fim▁begin|>/* Shape Settings Tab */ TP.shapesStore = Ext.create('Ext.data.Store', { fields: ['name', 'data'], proxy: { type: 'ajax', url: 'panorama.cgi?task=userdata_shapes', reader: { type: 'json', root: 'data' } }, data : thruk_shape_data }); TP.iconsetsStore = Ext.create('Ext.data.Store', { fields: ['name', 'sample', 'value', 'fileset'], proxy: { type: 'ajax', url: 'panorama.cgi?task=userdata_iconsets&withempty=1', reader: { type: 'json', root: 'data' } }, autoLoad: true, data : thruk_iconset_data }); TP.iconTypesStore = Ext.create('Ext.data.Store', { fields: ['name', 'value', 'icon'], autoLoad: false, data : [{value:'TP.HostStatusIcon', name:'Host', icon:url_prefix+'plugins/panorama/images/server.png'}, {value:'TP.HostgroupStatusIcon', name:'Hostgroup', icon:url_prefix+'plugins/panorama/images/server_link.png'}, {value:'TP.ServiceStatusIcon', name:'Service', icon:url_prefix+'plugins/panorama/images/computer.png'}, {value:'TP.ServicegroupStatusIcon', name:'Service Group', icon:url_prefix+'plugins/panorama/images/computer_link.png'}, {value:'TP.FilterStatusIcon', name:'Custom Filter', icon:url_prefix+'plugins/panorama/images/page_find.png'} ] }); TP.iconSettingsWindow = undefined; TP.iconShowEditDialog = function(panel) { panel.stateful = false; var tab = Ext.getCmp(panel.panel_id); var lastType = panel.xdata.appearance.type; // make sure only one window is open at a time if(TP.iconSettingsWindow != undefined) { TP.iconSettingsWindow.destroy(); } tab.disableMapControlsTemp(); TP.resetMoveIcons(); TP.skipRender = false; var defaultSpeedoSource = 'problems'; var perfDataUpdate = function() { // ensure fresh and correct performance data window.perfdata = {}; panel.setIconLabel(undefined, true); // update speedo var data = [['number of problems', 'problems'], ['number of problems (incl. warnings)', 'problems_warn']]; for(var key in perfdata) { if(defaultSpeedoSource == 'problems') { defaultSpeedoSource = 'perfdata:'+key; } var r = TP.getPerfDataMinMax(perfdata[key], '?'); var options = r.min+" - "+r.max; data.push(['Perf. Data: '+key+' ('+options+')', 'perfdata:'+key]); } /* use availability data as source */ var xdata = TP.get_icon_form_xdata(settingsWindow); if(xdata.label && xdata.label.labeltext && TP.availabilities && TP.availabilities[panel.id]) { var avail = TP.availabilities[panel.id]; for(var key in avail) { var d = avail[key]; var last = d.last != undefined ? d.last : '...'; if(last == -1) { last = '...'; } var options = d.opts['d']; if(d.opts['tm']) { options += '/'+d.opts['tm']; } data.push(['Availability: '+last+'% ('+options+')', 'avail:'+key]); } } var cbo = Ext.getCmp('speedosourceStore'); TP.updateArrayStoreKV(cbo.store, data); // update shape var data = [['fixed', 'fixed']]; for(var key in perfdata) { var r = TP.getPerfDataMinMax(perfdata[key], 100); var options = r.min+" - "+r.max; data.push(['Perf. 
Data: '+key+' ('+options+')', 'perfdata:'+key]); } var cbo = Ext.getCmp('shapesourceStore'); TP.updateArrayStoreKV(cbo.store, data); var cbo = Ext.getCmp('connectorsourceStore'); TP.updateArrayStoreKV(cbo.store, data); } /* General Settings Tab */ var stateUpdate = function() { var xdata = TP.get_icon_form_xdata(settingsWindow); TP.updateAllIcons(Ext.getCmp(panel.panel_id), panel.id, xdata); labelUpdate(); // update performance data stores perfDataUpdate(); } var generalItems = panel.getGeneralItems(); if(generalItems != undefined && panel.xdata.cls != 'TP.StaticIcon') { generalItems.unshift({ xtype: 'combobox', name: 'newcls', fieldLabel: 'Filter Type', displayField: 'name', valueField: 'value', store: TP.iconTypesStore, editable: false, listConfig : { getInnerTpl: function(displayField) { return '<div class="x-combo-list-item"><img src="{icon}" height=16 width=16 style="vertical-align:top; margin-right: 3px;">{name}<\/div>'; } }, value: panel.xdata.cls, listeners: { change: function(This, newValue, oldValue, eOpts) { if(TP.iconSettingsWindow == undefined) { return; } TP.iconSettingsWindow.mask('changing...'); var key = panel.id; var xdata = TP.get_icon_form_xdata(settingsWindow); var conf = {xdata: xdata}; conf.xdata.cls = newValue; panel.redrawOnly = true; panel.destroy(); TP.timeouts['timeout_' + key + '_show_settings'] = window.setTimeout(function() { TP.iconSettingsWindow.skipRestore = true; /* does not exist when changing a newly placed icon */ if(TP.cp.state[key]) { TP.cp.state[key].xdata.cls = newValue; } panel = TP.add_panlet({id:key, skip_state:true, tb:tab, autoshow:true, state:conf, type:newValue}, false); panel.xdata = conf.xdata; panel.classChanged = newValue; TP.iconShowEditDialog(panel); TP.cp.state[key].xdata.cls = oldValue; }, 50); } } }); } var generalTab = { title : 'General', type : 'panel', hidden: generalItems != undefined ? 
false : true, items: [{ xtype : 'panel', layout: 'fit', border: 0, items: [{ xtype: 'form', id: 'generalForm', bodyPadding: 2, border: 0, bodyStyle: 'overflow-y: auto;', submitEmptyText: false, defaults: { anchor: '-12', labelWidth: panel.generalLabelWidth || 132, listeners: { change: function(This, newValue, oldValue, eOpts) { if(newValue != "") { stateUpdate() } } } }, items: generalItems }] }] }; var updateDisabledFields = function(xdata) { var originalRenderUpdate = renderUpdate; renderUpdate = Ext.emptyFn; Ext.getCmp('shapeheightfield').setDisabled(xdata.appearance.shapelocked); Ext.getCmp('shapetogglelocked').toggle(xdata.appearance.shapelocked); Ext.getCmp('pieheightfield').setDisabled(xdata.appearance.pielocked); Ext.getCmp('pietogglelocked').toggle(xdata.appearance.pielocked); if(xdata.appearance.type == "connector" || xdata.appearance.type == "none") { Ext.getCmp('rotationfield').setVisible(false); } else { Ext.getCmp('rotationfield').setVisible(true); } renderUpdate = originalRenderUpdate; }; /* Layout Settings Tab */ var layoutTab = { title: 'Layout', type: 'panel', items: [{ xtype : 'panel', layout: 'fit', border: 0, items: [{ xtype: 'form', id: 'layoutForm', bodyPadding: 2, border: 0, bodyStyle: 'overflow-y: auto;', submitEmptyText: false, defaults: { anchor: '-12', labelWidth: 80 }, items: [{ fieldLabel: 'Position', xtype: 'fieldcontainer', layout: 'table', items: [{ xtype: 'label', text: 'x:', style: 'margin-left: 0; margin-right: 2px;' }, { xtype: 'numberfield', name: 'x', width: 55, value: panel.xdata.layout.x, listeners: { change: function(This, newValue, oldValue, eOpts) { if(!panel.noMoreMoves) { panel.noMoreMoves = true; var y = Number(This.up('panel').getValues().y); panel.setPosition(newValue, y); panel.noMoreMoves = false; } } }}, { xtype: 'label', text: 'y:', style: 'margin-left: 10px; margin-right: 2px;' }, { xtype: 'numberfield', name: 'y', width: 55, value: panel.xdata.layout.y, listeners: { change: function(This, newValue, oldValue, eOpts) { if(!panel.noMoreMoves) { panel.noMoreMoves = true; var x = Number(This.up('panel').getValues().x); panel.setPosition(x, newValue); panel.noMoreMoves = false; } } }}, { xtype: 'label', text: '(use cursor keys)', style: 'margin-left: 10px;', cls: 'form-hint' } ] }, { fieldLabel: 'Rotation', xtype: 'numberunit', allowDecimals: false, name: 'rotation', id: 'rotationfield', unit: '°', minValue: -360, maxValue: 360, step: 15, value: panel.xdata.layout.rotation != undefined ? panel.xdata.layout.rotation : 0, listeners: { change: function(This) { var xdata = TP.get_icon_form_xdata(settingsWindow); panel.applyRotation(This.value, xdata); } } }, { fieldLabel: 'Z-Index', xtype: 'numberfield', allowDecimals: false, name: 'zindex', minValue: -10, maxValue: 100, step: 1, value: panel.xdata.layout.zindex != undefined ? panel.xdata.layout.zindex : 0, listeners: { change: function(This) { var xdata = TP.get_icon_form_xdata(settingsWindow); panel.applyZindex(This.value, xdata); } } }, { fieldLabel: 'Scale', id: 'layoutscale', xtype: 'numberunit', unit: '%', allowDecimals: true, name: 'scale', minValue: 0, maxValue: 10000, step: 1, value: panel.xdata.layout.scale != undefined ? panel.xdata.layout.scale : 100, listeners: { change: function(This) { var xdata = TP.get_icon_form_xdata(settingsWindow); panel.applyScale(This.value, xdata); } }, disabled: (panel.hasScale || panel.xdata.appearance.type == 'icon') ? false : true, hidden: panel.iconType == 'text' ? 
true : false }] }] }] }; TP.shapesStore.load(); var renderUpdate = Ext.emptyFn; var renderUpdateDo = function(forceColor, forceRenderItem) { if(TP.skipRender) { return; } var xdata = TP.get_icon_form_xdata(settingsWindow); if(panel.iconType == 'image') { panel.setRenderItem(xdata); } if(xdata.appearance == undefined) { return; } if(xdata.appearance.type == undefined) { return; } if(xdata.appearance.type == 'shape') { forceRenderItem = true; } if(xdata.appearance.type != lastType || forceRenderItem) { if(panel.setRenderItem) { panel.setRenderItem(xdata, forceRenderItem); } } lastType = xdata.appearance.type; if(xdata.appearance.type == 'shape') { panel.shapeRender(xdata, forceColor); } if(xdata.appearance.type == 'pie') { panel.pieRender(xdata, forceColor); } if(xdata.appearance.type == 'speedometer') { panel.speedoRender(xdata, forceColor); } if(xdata.appearance.type == 'connector') { panel.connectorRender(xdata, forceColor); } labelUpdate(); updateDisabledFields(xdata); } var appearanceTab = { title: 'Appearance', type: 'panel', hidden: panel.hideAppearanceTab, listeners: { show: perfDataUpdate }, items: [{ xtype : 'panel', layout: 'fit', border: 0, items: [{ xtype: 'form', id: 'appearanceForm', bodyPadding: 2, border: 0, bodyStyle: 'overflow-y: auto;', submitEmptyText: false, defaults: { anchor: '-12', labelWidth: 60, listeners: { change: function() { renderUpdate(); } } }, items: [{ /* appearance type */ xtype: 'combobox', fieldLabel: 'Type', name: 'type', store: [['none','Label Only'], ['icon','Icon'], ['connector', 'Line / Arrow / Watermark'], ['pie', 'Pie Chart'], ['speedometer', 'Speedometer'], ['shape', 'Shape']], id: 'appearance_types', editable: false, listeners: { change: function(This, newValue, oldValue, eOpts) { Ext.getCmp('appearanceForm').items.each(function(f, i) { if(f.cls != undefined) { if(f.cls.match(newValue)) { f.show(); } else {<|fim▁hole|> if(newValue == 'icon' || panel.hasScale) { Ext.getCmp('layoutscale').setDisabled(false); } else { Ext.getCmp('layoutscale').setDisabled(true); } if(newValue == 'shape') { // fill in defaults var values = Ext.getCmp('appearanceForm').getForm().getValues(); if(!values['shapename']) { values['shapename'] = 'arrow'; values['shapelocked'] = true; values['shapewidth'] = 50; values['shapeheight'] = 50; values['shapecolor_ok'] = '#199C0F'; values['shapecolor_warning'] = '#CDCD0A'; values['shapecolor_critical'] = '#CA1414'; values['shapecolor_unknown'] = '#CC740F'; values['shapegradient'] = 0; values['shapesource'] = 'fixed'; } var originalRenderUpdate = renderUpdate; renderUpdate = Ext.emptyFn; Ext.getCmp('appearanceForm').getForm().setValues(values); renderUpdate = originalRenderUpdate; } if(newValue == 'pie') { // fill in defaults var values = Ext.getCmp('appearanceForm').getForm().getValues(); if(!values['piewidth']) { values['piewidth'] = 50; values['pieheight'] = 50; values['pielocked'] = true; values['pieshadow'] = false; values['piedonut'] = 0; values['pielabel'] = false; values['piegradient'] = 0; values['piecolor_ok'] = '#199C0F'; values['piecolor_warning'] = '#CDCD0A'; values['piecolor_critical'] = '#CA1414'; values['piecolor_unknown'] = '#CC740F'; values['piecolor_up'] = '#199C0F'; values['piecolor_down'] = '#CA1414'; values['piecolor_unreachable'] = '#CA1414'; } Ext.getCmp('appearanceForm').getForm().setValues(values); } if(newValue == 'speedometer') { // fill in defaults var values = Ext.getCmp('appearanceForm').getForm().getValues(); if(!values['speedowidth']) { values['speedowidth'] = 180; values['speedoshadow'] = false; 
values['speedoneedle'] = false; values['speedodonut'] = 0; values['speedogradient'] = 0; values['speedosource'] = defaultSpeedoSource; values['speedomargin'] = 5; values['speedosteps'] = 10; values['speedocolor_ok'] = '#199C0F'; values['speedocolor_warning'] = '#CDCD0A'; values['speedocolor_critical'] = '#CA1414'; values['speedocolor_unknown'] = '#CC740F'; values['speedocolor_bg'] = '#DDDDDD'; } Ext.getCmp('appearanceForm').getForm().setValues(values); } if(newValue == 'connector') { // fill in defaults var values = Ext.getCmp('appearanceForm').getForm().getValues(); if(!values['connectorwidth']) { var pos = panel.getPosition(); values['connectorfromx'] = pos[0]-100; values['connectorfromy'] = pos[1]; values['connectortox'] = pos[0]+100; values['connectortoy'] = pos[1]; values['connectorwidth'] = 3; values['connectorarrowtype'] = 'both'; values['connectorarrowwidth'] = 10; values['connectorarrowlength'] = 20; values['connectorarrowinset'] = 2; values['connectorcolor_ok'] = '#199C0F'; values['connectorcolor_warning'] = '#CDCD0A'; values['connectorcolor_critical'] = '#CA1414'; values['connectorcolor_unknown'] = '#CC740F'; values['connectorgradient'] = 0; values['connectorsource'] = 'fixed'; } var originalRenderUpdate = renderUpdate; renderUpdate = Ext.emptyFn; Ext.getCmp('appearanceForm').getForm().setValues(values); renderUpdate = originalRenderUpdate; } renderUpdate(); } } }, /* Icons */ { fieldLabel: 'Icon Set', id: 'iconset_field', xtype: 'combobox', name: 'iconset', cls: 'icon', store: TP.iconsetsStore, value: '', emptyText: 'use dashboards default icon set', displayField: 'name', valueField: 'value', listConfig : { getInnerTpl: function(displayField) { return '<div class="x-combo-list-item"><img src="{sample}" height=16 width=16 style="vertical-align:top; margin-right: 3px;">{name}<\/div>'; } }, listeners: { change: function(This) { renderUpdate(undefined, true); } } }, { xtype: 'panel', cls: 'icon', html: 'Place image sets in: '+usercontent_folder+'/images/status/', style: 'text-align: center;', bodyCls: 'form-hint', padding: '10 0 0 0', border: 0 }, /* Shapes */ { fieldLabel: 'Shape', xtype: 'combobox', name: 'shapename', cls: 'shape', store: TP.shapesStore, displayField: 'name', valueField: 'name', listConfig : { getInnerTpl: function(displayField) { TP.tmpid = 0; return '<div class="x-combo-list-item"><span name="{name}" height=16 width=16 style="vertical-align:top; margin-right: 3px;"><\/span>{name}<\/div>'; } }, listeners: { afterrender: function(This) { var me = This; me.shapes = []; This.getPicker().addListener('show', function(This) { Ext.Array.each(This.el.dom.getElementsByTagName('SPAN'), function(item, idx) { TP.show_shape_preview(item, panel, me.shapes); }); }); This.getPicker().addListener('refresh', function(This) { Ext.Array.each(This.el.dom.getElementsByTagName('SPAN'), function(item, idx) { TP.show_shape_preview(item, panel, me.shapes); }); }); }, destroy: function(This) { // clean up Ext.Array.each(This.shapes, function(item, idx) { item.destroy() }); }, change: function(This) { renderUpdate(); } } }, { fieldLabel: 'Size', xtype: 'fieldcontainer', name: 'shapesize', cls: 'shape', layout: 'table', defaults: { listeners: { change: function() { renderUpdate() } } }, items: [{ xtype: 'label', text: 'Width:', style: 'margin-left: 0; margin-right: 2px;' }, { xtype: 'numberunit', name: 'shapewidth', unit: 'px', width: 65, value: panel.xdata.appearance.shapewidth }, { xtype: 'label', text: 'Height:', style: 'margin-left: 10px; margin-right: 2px;' }, { xtype: 'numberunit', 
name: 'shapeheight', unit: 'px', width: 65, value: panel.xdata.appearance.shapeheight, id: 'shapeheightfield' }, { xtype: 'button', width: 22, icon: url_prefix+'plugins/panorama/images/link.png', enableToggle: true, style: 'margin-left: 2px; margin-top: -6px;', id: 'shapetogglelocked', toggleHandler: function(btn, state) { this.up('form').getForm().setValues({shapelocked: state ? '1' : '' }); renderUpdate(); } }, { xtype: 'hidden', name: 'shapelocked' } ] }, { fieldLabel: 'Colors', cls: 'shape', xtype: 'fieldcontainer', layout: { type: 'table', columns: 4, tableAttrs: { style: { width: '100%' } } }, defaults: { listeners: { change: function() { renderUpdateDo() } }, mouseover: function(color) { renderUpdateDo(color); }, mouseout: function(color) { renderUpdateDo(); } }, items: [ { xtype: 'label', text: panel.iconType == 'host' ? 'Up: ' : 'Ok: ' }, { xtype: 'colorcbo', name: 'shapecolor_ok', value: panel.xdata.appearance.shapecolor_ok, width: 80, tdAttrs: { style: 'padding-right: 10px;'}, colorGradient: { start: '#D3D3AE', stop: '#00FF00' } }, { xtype: 'label', text: panel.iconType == 'host' ? 'Unreachable: ' : 'Warning: ' }, { xtype: 'colorcbo', name: 'shapecolor_warning', value: panel.xdata.appearance.shapecolor_warning, width: 80, colorGradient: { start: '#E1E174', stop: '#FFFF00' } }, { xtype: 'label', text: panel.iconType == 'host' ? 'Down: ' : 'Critical: ' }, { xtype: 'colorcbo', name: 'shapecolor_critical', value: panel.xdata.appearance.shapecolor_critical, width: 80, colorGradient: { start: '#D3AEAE', stop: '#FF0000' } }, { xtype: 'label', text: 'Unknown: ', hidden: panel.iconType == 'host' ? true : false }, { xtype: 'colorcbo', name: 'shapecolor_unknown', value: panel.xdata.appearance.shapecolor_unknown, width: 80, colorGradient: { start: '#DAB891', stop: '#FF8900' }, hidden: panel.iconType == 'host' ? 
true : false }] }, { fieldLabel: 'Gradient', cls: 'shape', xtype: 'fieldcontainer', layout: { type: 'hbox', align: 'stretch' }, items: [{ xtype: 'numberfield', allowDecimals: true, name: 'shapegradient', maxValue: 1, minValue: -1, step: 0.05, value: panel.xdata.appearance.shapegradient, width: 55, listeners: { change: function() { renderUpdate(); } } }, { xtype: 'label', text: 'Source:', margins: {top: 2, right: 2, bottom: 0, left: 10} }, { name: 'shapesource', xtype: 'combobox', id: 'shapesourceStore', displayField: 'name', valueField: 'value', queryMode: 'local', store: { fields: ['name', 'value'], data: [] }, editable: false, value: panel.xdata.appearance.shapesource, listeners: { focus: perfDataUpdate, change: function() { renderUpdate(); } }, flex: 1 }] }, { xtype: 'panel', cls: 'shape', html: 'Place shapes in: '+usercontent_folder+'/shapes/', style: 'text-align: center;', bodyCls: 'form-hint', padding: '10 0 0 0', border: 0 }, /* Connector */ { fieldLabel: 'From', xtype: 'fieldcontainer', name: 'connectorfrom', cls: 'connector', layout: { type: 'hbox', align: 'stretch' }, defaults: { listeners: { change: function() { renderUpdate(); } } }, items: [{ xtype: 'label', text: 'x', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'connectorfromx', width: 70, unit: 'px', value: panel.xdata.appearance.connectorfromx }, { xtype: 'label', text: 'y', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'connectorfromy', width: 70, unit: 'px', value: panel.xdata.appearance.connectorfromy },{ xtype: 'label', text: 'Endpoints', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'combobox', name: 'connectorarrowtype', width: 70, matchFieldWidth: false, value: panel.xdata.appearance.connectorarrowtype, store: ['both', 'left', 'right', 'none'], listConfig : { getInnerTpl: function(displayField) { return '<div class="x-combo-list-item"><img src="'+url_prefix+'plugins/panorama/images/connector_type_{field1}.png" height=16 width=77 style="vertical-align:top; margin-right: 3px;"> {field1}<\/div>'; } } }] }, { fieldLabel: 'To', xtype: 'fieldcontainer', name: 'connectorto', cls: 'connector', layout: { type: 'hbox', align: 'stretch' }, defaults: { listeners: { change: function() { renderUpdate(); } } }, items: [{ xtype: 'label', text: 'x', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'connectortox', width: 70, unit: 'px', value: panel.xdata.appearance.connectortox }, { xtype: 'label', text: 'y', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'connectortoy', width: 70, unit: 'px', value: panel.xdata.appearance.connectortoy }] }, { fieldLabel: 'Size', xtype: 'fieldcontainer', name: 'connectorsize', cls: 'connector', layout: { type: 'hbox', align: 'stretch' }, defaults: { listeners: { change: function() { renderUpdate(); } } }, items: [{ xtype: 'label', text: 'Width', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'connectorwidth', width: 60, unit: 'px', value: panel.xdata.appearance.connectorwidth }, { xtype: 'label', text: 'Variable Width', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'checkbox', name: 'connectorvariable' }] }, { fieldLabel: 'Endpoints', xtype: 'fieldcontainer', name: 'connectorarrow', cls: 'connector', layout: { type: 'hbox', align: 'stretch' }, defaults: { listeners: { change: function() { 
renderUpdate(); } } }, items: [{ xtype: 'label', text: 'Width', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'connectorarrowwidth', width: 60, unit: 'px', minValue: 0, value: panel.xdata.appearance.connectorarrowwidth }, { xtype: 'label', text: 'Length', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'connectorarrowlength', width: 60, minValue: 0, unit: 'px', value: panel.xdata.appearance.connectorarrowlength }, { xtype: 'label', text: 'Inset', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'connectorarrowinset', width: 60, unit: 'px', value: panel.xdata.appearance.connectorarrowinset }] }, { fieldLabel: 'Colors', cls: 'connector', xtype: 'fieldcontainer', layout: { type: 'table', columns: 4, tableAttrs: { style: { width: '100%' } } }, defaults: { listeners: { change: function() { renderUpdateDo() } }, mouseover: function(color) { renderUpdateDo(color); }, mouseout: function(color) { renderUpdateDo(); } }, items: [ { xtype: 'label', text: panel.iconType == 'host' ? 'Up ' : 'Ok ' }, { xtype: 'colorcbo', name: 'connectorcolor_ok', value: panel.xdata.appearance.connectorcolor_ok, width: 80, tdAttrs: { style: 'padding-right: 10px;'}, colorGradient: { start: '#D3D3AE', stop: '#00FF00' } }, { xtype: 'label', text: panel.iconType == 'host' ? 'Unreachable ' : 'Warning ' }, { xtype: 'colorcbo', name: 'connectorcolor_warning', value: panel.xdata.appearance.connectorcolor_warning, width: 80, colorGradient: { start: '#E1E174', stop: '#FFFF00' } }, { xtype: 'label', text: panel.iconType == 'host' ? 'Down ' : 'Critical ' }, { xtype: 'colorcbo', name: 'connectorcolor_critical', value: panel.xdata.appearance.connectorcolor_critical, width: 80, colorGradient: { start: '#D3AEAE', stop: '#FF0000' } }, { xtype: 'label', text: 'Unknown ', hidden: panel.iconType == 'host' ? true : false }, { xtype: 'colorcbo', name: 'connectorcolor_unknown', value: panel.xdata.appearance.connectorcolor_unknown, width: 80, colorGradient: { start: '#DAB891', stop: '#FF8900' }, hidden: panel.iconType == 'host' ? true : false }] }, { fieldLabel: 'Gradient', cls: 'connector', xtype: 'fieldcontainer', layout: { type: 'hbox', align: 'stretch' }, items: [{ xtype: 'numberfield', allowDecimals: true, name: 'connectorgradient', maxValue: 1, minValue: -1, step: 0.05, value: panel.xdata.appearance.connectorgradient, width: 55, listeners: { change: function() { renderUpdate(); } } }, { xtype: 'label', text: 'Source', margins: {top: 2, right: 2, bottom: 0, left: 10} }, { name: 'connectorsource', xtype: 'combobox', id: 'connectorsourceStore', displayField: 'name', valueField: 'value', queryMode: 'local', store: { fields: ['name', 'value'], data: [] }, editable: false, value: panel.xdata.appearance.connectorsource, listeners: { focus: perfDataUpdate, change: function() { renderUpdate(); } }, flex: 1 }] }, { fieldLabel: 'Options', xtype: 'fieldcontainer', cls: 'connector', layout: 'table', defaults: { listeners: { change: function() { renderUpdate(undefined, true) } } }, items: [ { xtype: 'label', text: 'Cust. Perf. 
Data Min', style: 'margin-left: 0px; margin-right: 2px;' }, { xtype: 'numberfield', allowDecimals: true, width: 70, name: 'connectormin', step: 100 }, { xtype: 'label', text: 'Max', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'numberfield', allowDecimals: true, width: 70, name: 'connectormax', step: 100 } ] }, /* Pie Chart */ { fieldLabel: 'Size', xtype: 'fieldcontainer', cls: 'pie', layout: 'table', defaults: { listeners: { change: function() { renderUpdate() } } }, items: [{ xtype: 'label', text: 'Width:', style: 'margin-left: 0; margin-right: 2px;' }, { xtype: 'numberunit', name: 'piewidth', unit: 'px', width: 65, value: panel.xdata.appearance.piewidth }, { xtype: 'label', text: 'Height:', style: 'margin-left: 10px; margin-right: 2px;' }, { xtype: 'numberunit', name: 'pieheight', unit: 'px', width: 65, value: panel.xdata.appearance.pieheight, id: 'pieheightfield' }, { xtype: 'button', width: 22, icon: url_prefix+'plugins/panorama/images/link.png', enableToggle: true, style: 'margin-left: 2px; margin-top: -6px;', id: 'pietogglelocked', toggleHandler: function(btn, state) { this.up('form').getForm().setValues({pielocked: state ? '1' : '' }); renderUpdate(); } }, { xtype: 'hidden', name: 'pielocked' } ] }, { fieldLabel: 'Options', xtype: 'fieldcontainer', cls: 'pie', layout: 'table', defaults: { listeners: { change: function() { renderUpdate(undefined, true) } } }, items: [ { xtype: 'label', text: 'Shadow:', style: 'margin-left: 0px; margin-right: 2px;', hidden: true }, { xtype: 'checkbox', name: 'pieshadow', hidden: true }, { xtype: 'label', text: 'Label Name:', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'checkbox', name: 'pielabel' }, { xtype: 'label', text: 'Label Value:', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'checkbox', name: 'pielabelval' }, { xtype: 'label', text: 'Donut:', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'numberunit', allowDecimals: false, width: 60, name: 'piedonut', unit: 'px' }] }, { fieldLabel: 'Colors', cls: 'pie', xtype: 'fieldcontainer', layout: { type: 'table', columns: 4, tableAttrs: { style: { width: '100%' } } }, defaults: { listeners: { change: function() { renderUpdateDo() } }, mouseover: function(color) { renderUpdateDo(color); }, mouseout: function(color) { renderUpdateDo(); } }, items: [ { xtype: 'label', text: 'Ok:' }, { xtype: 'colorcbo', name: 'piecolor_ok', value: panel.xdata.appearance.piecolor_ok, width: 80, tdAttrs: { style: 'padding-right: 10px;'}, colorGradient: { start: '#D3D3AE', stop: '#00FF00' } }, { xtype: 'label', text: 'Warning:' }, { xtype: 'colorcbo', name: 'piecolor_warning', value: panel.xdata.appearance.piecolor_warning, width: 80, colorGradient: { start: '#E1E174', stop: '#FFFF00' } }, { xtype: 'label', text: 'Critical:' }, { xtype: 'colorcbo', name: 'piecolor_critical', value: panel.xdata.appearance.piecolor_critical, width: 80, colorGradient: { start: '#D3AEAE', stop: '#FF0000' } }, { xtype: 'label', text: 'Unknown:' }, { xtype: 'colorcbo', name: 'piecolor_unknown', value: panel.xdata.appearance.piecolor_unknown, width: 80, colorGradient: { start: '#DAB891', stop: '#FF8900' } }, { xtype: 'label', text: 'Up:' }, { xtype: 'colorcbo', name: 'piecolor_up', value: panel.xdata.appearance.piecolor_up, width: 80, colorGradient: { start: '#D3D3AE', stop: '#00FF00' } }, { xtype: 'label', text: 'Down:' }, { xtype: 'colorcbo', name: 'piecolor_down', value: panel.xdata.appearance.piecolor_down, width: 80, colorGradient: { start: '#D3AEAE', stop: '#FF0000' } }, { xtype: 'label', 
text: 'Unreachable:' }, { xtype: 'colorcbo', name: 'piecolor_unreachable', value: panel.xdata.appearance.piecolor_unreachable, width: 80, colorGradient: { start: '#D3AEAE', stop: '#FF0000' } }, { xtype: 'label', text: 'Gradient:' }, { xtype: 'numberfield', allowDecimals: true, width: 80, name: 'piegradient', maxValue: 1, minValue: -1, step: 0.05, value: panel.xdata.appearance.piegradient } ] }, /* Speedometer Chart */ { fieldLabel: 'Size', xtype: 'fieldcontainer', cls: 'speedometer', layout: 'table', defaults: { listeners: { change: function() { renderUpdate(undefined, true) } } }, items: [{ xtype: 'label', text: 'Width:', style: 'margin-left: 0; margin-right: 2px;' }, { xtype: 'numberunit', name: 'speedowidth', unit: 'px', width: 65, value: panel.xdata.appearance.speedowidth }, { xtype: 'label', text: 'Shadow:', style: 'margin-left: 0px; margin-right: 2px;', hidden: true }, { xtype: 'checkbox', name: 'speedoshadow', hidden: true }, { xtype: 'label', text: 'Needle:', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'checkbox', name: 'speedoneedle' }, { xtype: 'label', text: 'Donut:', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'numberunit', allowDecimals: false, width: 60, name: 'speedodonut', unit: 'px' } ] }, { fieldLabel: 'Axis', xtype: 'fieldcontainer', cls: 'speedometer', layout: 'table', defaults: { listeners: { change: function() { renderUpdate(undefined, true) } } }, items: [ { xtype: 'label', text: 'Steps:', style: 'margin-left: 0px; margin-right: 2px;' }, { xtype: 'numberfield', allowDecimals: false, width: 60, name: 'speedosteps', step: 1, minValue: 0, maxValue: 1000 }, { xtype: 'label', text: 'Margin:', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'numberunit', allowDecimals: false, width: 60, name: 'speedomargin', unit: 'px' }] }, { fieldLabel: 'Colors', cls: 'speedometer', xtype: 'fieldcontainer', layout: { type: 'table', columns: 4, tableAttrs: { style: { width: '100%' } } }, defaults: { listeners: { change: function() { renderUpdateDo() } }, mouseover: function(color) { renderUpdateDo(color); }, mouseout: function(color) { renderUpdateDo(); } }, items: [ { xtype: 'label', text: panel.iconType == 'host' ? 'Up: ' : 'Ok: ' }, { xtype: 'colorcbo', name: 'speedocolor_ok', value: panel.xdata.appearance.speedocolor_ok, width: 80, tdAttrs: { style: 'padding-right: 10px;'}, colorGradient: { start: '#D3D3AE', stop: '#00FF00' } }, { xtype: 'label', text: panel.iconType == 'host' ? 'Unreachable: ' : 'Warning: ' }, { xtype: 'colorcbo', name: 'speedocolor_warning', value: panel.xdata.appearance.speedocolor_warning, width: 80, colorGradient: { start: '#E1E174', stop: '#FFFF00' } }, { xtype: 'label', text: panel.iconType == 'host' ? 
'Down: ' : 'Critical: ' }, { xtype: 'colorcbo', name: 'speedocolor_critical', value: panel.xdata.appearance.speedocolor_critical, width: 80, colorGradient: { start: '#D3AEAE', stop: '#FF0000' } }, { xtype: 'label', text: 'Unknown:' }, { xtype: 'colorcbo', name: 'speedocolor_unknown', value: panel.xdata.appearance.speedocolor_unknown, width: 80, colorGradient: { start: '#DAB891', stop: '#FF8900' } }, { xtype: 'label', text: 'Background:' }, { xtype: 'colorcbo', name: 'speedocolor_bg', value: panel.xdata.appearance.speedocolor_bg, width: 80 }, { xtype: 'label', text: 'Gradient:' }, { xtype: 'numberfield', allowDecimals: true, width: 80, name: 'speedogradient', maxValue: 1, minValue: -1, step: 0.05, value: panel.xdata.appearance.speedogradient } ] }, { fieldLabel: 'Source', name: 'speedosource', xtype: 'combobox', cls: 'speedometer', id: 'speedosourceStore', displayField: 'name', valueField: 'value', queryMode: 'local', store: { fields: ['name', 'value'], data: [] }, editable: false, listeners: { focus: perfDataUpdate, change: function() { renderUpdate(undefined, true) } } }, { fieldLabel: 'Options', xtype: 'fieldcontainer', cls: 'speedometer', layout: 'table', defaults: { listeners: { change: function() { renderUpdate(undefined, true) } } }, items: [{ xtype: 'label', text: 'Invert:', style: 'margin-left: 0; margin-right: 2px;' }, { xtype: 'checkbox', name: 'speedoinvert' }, { xtype: 'label', text: 'Min:', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'numberfield', allowDecimals: true, width: 70, name: 'speedomin', step: 100 }, { xtype: 'label', text: 'Max:', style: 'margin-left: 8px; margin-right: 2px;' }, { xtype: 'numberfield', allowDecimals: true, width: 70, name: 'speedomax', step: 100 } ] }] }] }] }; /* Link Settings Tab */ var server_actions_menu = []; Ext.Array.each(action_menu_actions, function(name, i) { server_actions_menu.push({ text: name, icon: url_prefix+'plugins/panorama/images/cog.png', handler: function(This, eOpts) { This.up('form').getForm().setValues({link: 'server://'+name+'/'}) } }); }); var action_menus_menu = []; Ext.Array.each(action_menu_items, function(val, i) { var name = val[0]; action_menus_menu.push({ text: name, icon: url_prefix+'plugins/panorama/images/cog.png', handler: function(This, eOpts) { This.up('form').getForm().setValues({link: 'menu://'+name+'/'}) } }); }); var linkTab = { title: 'Link', type: 'panel', items: [{ xtype : 'panel', layout: 'fit', border: 0, items: [{ xtype: 'form', id: 'linkForm', bodyPadding: 2, border: 0, bodyStyle: 'overflow-y: auto;', submitEmptyText: false, defaults: { anchor: '-12', labelWidth: 132 }, items: [{ fieldLabel: 'Hyperlink', xtype: 'textfield', name: 'link', emptyText: 'http://... 
or predefined from below' }, { fieldLabel: 'Predefined Links', xtype: 'fieldcontainer', items: [{ xtype: 'button', text: 'Choose', icon: url_prefix+'plugins/panorama/images/world.png', menu: { items: [{ text: 'My Dashboards', icon: url_prefix+'plugins/panorama/images/user_suit.png', menu: [{ text: 'Loading...', icon: url_prefix+'plugins/panorama/images/loading-icon.gif', disabled: true }] }, { text: 'Public Dashboards', icon: url_prefix+'plugins/panorama/images/world.png', menu: [{ text: 'Loading...', icon: url_prefix+'plugins/panorama/images/loading-icon.gif', disabled: true }] }, { text: 'Show Details', icon: url_prefix+'plugins/panorama/images/application_view_columns.png', handler: function(This, eOpts) { This.up('form').getForm().setValues({link: 'dashboard://show_details'}) } }, { text: 'Refresh', icon: url_prefix+'plugins/panorama/images/arrow_refresh.png', handler: function(This, eOpts) { This.up('form').getForm().setValues({link: 'dashboard://refresh'}) } }, { text: 'Server Actions', icon: url_prefix+'plugins/panorama/images/lightning_go.png', menu: server_actions_menu, disabled: server_actions_menu.length > 0 ? false : true }, { text: 'Action Menus', icon: url_prefix+'plugins/panorama/images/lightning_go.png', menu: action_menus_menu, disabled: action_menus_menu.length > 0 ? false : true }], listeners: { afterrender: function(This, eOpts) { TP.load_dashboard_menu_items(This.items.get(0).menu, 'panorama.cgi?task=dashboard_list&list=my', function(val) { This.up('form').getForm().setValues({link: 'dashboard://'+val.replace(/^tabpan-tab_/,'')})}, true); TP.load_dashboard_menu_items(This.items.get(1).menu, 'panorama.cgi?task=dashboard_list&list=public', function(val) { This.up('form').getForm().setValues({link: 'dashboard://'+val.replace(/^tabpan-tab_/,'')})}, true); } } } }] }, { fieldLabel: 'New Tab', xtype: 'checkbox', name: 'newtab', boxLabel: '(opens links in new tab or window)' }] }] }] }; /* Label Settings Tab */ var labelUpdate = function() { var xdata = TP.get_icon_form_xdata(settingsWindow); panel.setIconLabel(xdata.label || {}, true); }; var labelTab = { title: 'Label', type: 'panel', items: [{ xtype : 'panel', layout: 'fit', border: 0, items: [{ xtype: 'form', id: 'labelForm', bodyPadding: 2, border: 0, bodyStyle: 'overflow-y: auto;', submitEmptyText: false, defaults: { anchor: '-12', labelWidth: 80, listeners: { change: labelUpdate } }, items: [{ fieldLabel: 'Labeltext', xtype: 'fieldcontainer', layout: { type: 'hbox', align: 'stretch' }, items: [{ xtype: 'textfield', name: 'labeltext', flex: 1, id: 'label_textfield', listeners: { change: labelUpdate } }, { xtype: 'button', icon: url_prefix+'plugins/panorama/images/lightning_go.png', margins: {top: 0, right: 0, bottom: 0, left: 3}, tooltip: 'open label editor wizard', handler: function(btn) { TP.openLabelEditorWindow(panel); } }] }, { fieldLabel: 'Color', xtype: 'colorcbo', name: 'fontcolor', value: '#000000', mouseover: function(color) { var oldValue=this.getValue(); this.setValue(color); labelUpdate(); this.setRawValue(oldValue); }, mouseout: function(color) { labelUpdate(); } }, { xtype: 'fieldcontainer', fieldLabel: 'Font', layout: { type: 'hbox', align: 'stretch' }, defaults: { listeners: { change: labelUpdate } }, items: [{ name: 'fontfamily', xtype: 'fontcbo', value: '', flex: 1, editable: false }, { xtype: 'numberunit', allowDecimals: false, name: 'fontsize', width: 60, unit: 'px', margins: {top: 0, right: 0, bottom: 0, left: 3}, value: panel.xdata.label.fontsize != undefined ? 
panel.xdata.label.fontsize : 14 }, { xtype: 'hiddenfield', name: 'fontitalic', value: panel.xdata.label.fontitalic }, { xtype: 'button', enableToggle: true, name: 'fontitalic', icon: url_prefix+'plugins/panorama/images/text_italic.png', margins: {top: 0, right: 0, bottom: 0, left: 3}, toggleHandler: function(btn, state) { this.up('form').getForm().setValues({fontitalic: state ? '1' : '' }); }, listeners: { afterrender: function() { if(panel.xdata.label.fontitalic) { this.toggle(); } } } }, { xtype: 'hiddenfield', name: 'fontbold', value: panel.xdata.label.fontbold }, { xtype: 'button', enableToggle: true, name: 'fontbold', icon: url_prefix+'plugins/panorama/images/text_bold.png', margins: {top: 0, right: 0, bottom: 0, left: 3}, toggleHandler: function(btn, state) { this.up('form').getForm().setValues({fontbold: state ? '1' : ''}); }, listeners: { afterrender: function() { if(panel.xdata.label.fontbold) { this.toggle(); } } } }] }, { xtype: 'fieldcontainer', fieldLabel: 'Position', layout: { type: 'hbox', align: 'stretch' }, defaults: { listeners: { change: labelUpdate } }, items: [{ name: 'position', xtype: 'combobox', store: ['below', 'above', 'left', 'right', 'center', 'top-left'], value: 'below', flex: 1, editable: false }, { xtype: 'label', text: 'Offset: x', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'offsetx', width: 60, unit: 'px' }, { xtype: 'label', text: 'y', margins: {top: 3, right: 2, bottom: 0, left: 7} }, { xtype: 'numberunit', allowDecimals: false, name: 'offsety', width: 60, unit: 'px' }] }, { fieldLabel: 'Orientation', name: 'orientation', xtype: 'combobox', store: ['horizontal', 'vertical'], value: 'horizontal', editable: false }, { fieldLabel: 'Background', xtype: 'colorcbo', name: 'bgcolor', value: '', mouseover: function(color) { var oldValue=this.getValue(); this.setValue(color); labelUpdate(); this.setRawValue(oldValue); }, mouseout: function(color) { labelUpdate(); } }, { xtype: 'fieldcontainer', fieldLabel: 'Border', layout: { type: 'hbox', align: 'stretch' }, defaults: { listeners: { change: labelUpdate } }, items: [{ xtype: 'colorcbo', name: 'bordercolor', value: '', mouseover: function(color) { var oldValue=this.getValue(); this.setValue(color); labelUpdate(); this.setRawValue(oldValue); }, mouseout: function(color) { labelUpdate(); }, flex: 1, margins: {top: 0, right: 3, bottom: 0, left: 0} }, { xtype: 'numberunit', allowDecimals: false, name: 'bordersize', width: 60, unit: 'px' }] }, { fieldLabel: 'Backgr. 
Size', xtype: 'fieldcontainer', layout: 'table', items: [{ xtype: 'label', text: 'width:', style: 'margin-left: 0; margin-right: 2px;' }, { xtype: 'numberfield', name: 'width', width: 55, value: panel.xdata.label.width, listeners: { change: function(This, newValue, oldValue, eOpts) { labelUpdate(); } }}, { xtype: 'label', text: 'height:', style: 'margin-left: 10px; margin-right: 2px;' }, { xtype: 'numberfield', name: 'height', width: 55, value: panel.xdata.label.height, listeners: { change: function(This, newValue, oldValue, eOpts) { labelUpdate(); } }} ] } ] }] }] }; /* Source Tab */ var sourceTab = { title: 'Source', type: 'panel', listeners: { activate: function(This) { var xdata = TP.get_icon_form_xdata(settingsWindow); var j = Ext.JSON.encode(xdata); try { j = JSON.stringify(xdata, null, 2); } catch(err) { TP.logError(panel.id, "jsonStringifyException", err); } this.down('form').getForm().setValues({source: j, sourceError: ''}); } }, items: [{ xtype : 'panel', layout: 'fit', border: 0, items: [{ xtype: 'form', id: 'sourceForm', bodyPadding: 2, border: 0, bodyStyle: 'overflow-y: auto;', submitEmptyText: false, defaults: { anchor: '-12', labelWidth: 50 }, items: [{ fieldLabel: 'Source', xtype: 'textarea', name: 'source', height: 190 }, { fieldLabel: ' ', labelSeparator: '', xtype: 'fieldcontainer', items: [{ xtype: 'button', name: 'sourceapply', text: 'Apply', width: 100, handler: function(btn) { var values = Ext.getCmp('sourceForm').getForm().getValues(); try { var xdata = Ext.JSON.decode(values.source); TP.setIconSettingsValues(xdata); } catch(err) { TP.logError(panel.id, "jsonDecodeException", err); Ext.getCmp('sourceForm').getForm().setValues({sourceError: err}); } } }] }, { fieldLabel: ' ', labelSeparator: '', xtype: 'displayfield', name: 'sourceError', value: '' }] }] }] }; var tabPanel = new Ext.TabPanel({ activeTab : panel.initialSettingsTab ? 
panel.initialSettingsTab : 0, enableTabScroll : true, items : [ generalTab, layoutTab, appearanceTab, linkTab, labelTab, sourceTab ] }); /* add current available backends */ var backendItem = TP.getFormField(Ext.getCmp("generalForm"), 'backends'); if(backendItem) { TP.updateArrayStoreKV(backendItem.store, TP.getAvailableBackendsTab(tab)); if(backendItem.store.count() <= 1) { backendItem.hide(); } } var settingsWindow = new Ext.Window({ height: 350, width: 400, layout: 'fit', items: tabPanel, panel: panel, title: 'Icon Settings', buttonAlign: 'center', fbar: [/* panlet setting cancel button */ { xtype: 'button', text: 'cancel', handler: function(This) { settingsWindow.destroy(); } }, /* panlet setting save button */ { xtype: 'button', text: 'save', handler: function() { settingsWindow.skipRestore = true; panel.stateful = true; delete panel.xdata.label; delete panel.xdata.link; var xdata = TP.get_icon_form_xdata(settingsWindow); TP.log('['+this.id+'] icon config updated: '+Ext.JSON.encode(xdata)); for(var key in xdata) { panel.xdata[key] = xdata[key]; } panel.applyState({xdata: panel.xdata}); if(panel.classChanged) { panel.xdata.cls = panel.classChanged; } panel.forceSaveState(); delete TP.iconSettingsWindow; settingsWindow.destroy(); panel.firstRun = false; panel.applyXdata(); var tab = Ext.getCmp(panel.panel_id); TP.updateAllIcons(tab, panel.id); TP.updateAllLabelAvailability(tab, panel.id); } } ], listeners: { afterRender: function (This) { var form = This.items.getAt(0).items.getAt(1).down('form').getForm(); this.nav = Ext.create('Ext.util.KeyNav', this.el, { 'left': function(evt){ form.setValues({x: Number(form.getValues().x)-1}); }, 'right': function(evt){ form.setValues({x: Number(form.getValues().x)+1}); }, 'up': function(evt){ form.setValues({y: Number(form.getValues().y)-1}); }, 'down': function(evt){ form.setValues({y: Number(form.getValues().y)+1}); }, ignoreInputFields: true, scope: panel }); }, destroy: function() { delete TP.iconSettingsWindow; panel.stateful = true; if(!settingsWindow.skipRestore) { // if we cancel directly after adding a new icon, destroy it tab.enableMapControlsTemp(); if(panel.firstRun) { panel.destroy(); } else { if(panel.classChanged) { var key = panel.id; panel.redrawOnly = true; panel.destroy(); TP.timeouts['timeout_' + key + '_show_settings'] = window.setTimeout(function() { panel = TP.add_panlet({id:key, skip_state:true, tb:tab, autoshow:true}, false); TP.updateAllIcons(Ext.getCmp(panel.panel_id), panel.id); }, 50); return; } else { // restore position and layout if(panel.setRenderItem) { panel.setRenderItem(undefined, true); } if(TP.cp.state[panel.id]) { panel.applyXdata(TP.cp.state[panel.id].xdata); } } } } if(panel.el) { panel.el.dom.style.outline = ""; panel.setIconLabel(); } if(panel.dragEl1 && panel.dragEl1.el) { panel.dragEl1.el.dom.style.outline = ""; } if(panel.dragEl2 && panel.dragEl2.el) { panel.dragEl2.el.dom.style.outline = ""; } if(panel.labelEl && panel.labelEl.el) { panel.labelEl.el.dom.style.outline = ""; } TP.updateAllIcons(Ext.getCmp(panel.panel_id)); // workaround to put labels in front } } }).show(); tab.body.unmask(); TP.setIconSettingsValues(panel.xdata); TP.iconSettingsWindow = settingsWindow; // new mouseover tips while settings are open TP.iconTip.hide(); // move settings window next to panel itself var showAtPos = TP.getNextToPanelPos(panel, settingsWindow.width, settingsWindow.height); panel.setIconLabel(undefined, true); settingsWindow.showAt(showAtPos); TP.iconSettingsWindow.panel = panel; settingsWindow.renderUpdateDo = 
renderUpdateDo; renderUpdate = function(forceColor, forceRenderItem) { if(TP.skipRender) { return; } TP.reduceDelayEvents(TP.iconSettingsWindow, function() { if(TP.skipRender) { return; } if(!TP.iconSettingsWindow) { return; } TP.iconSettingsWindow.renderUpdateDo(forceColor, forceRenderItem); }, 100, 'timeout_settings_render_update'); }; settingsWindow.renderUpdate = renderUpdate; renderUpdate(); /* highlight current icon */ if(panel.xdata.appearance.type == "connector") { panel.dragEl1.el.dom.style.outline = "2px dotted orange"; panel.dragEl2.el.dom.style.outline = "2px dotted orange"; } else if (panel.iconType == "text") { panel.labelEl.el.dom.style.outline = "2px dotted orange"; } else { panel.el.dom.style.outline = "2px dotted orange"; } window.setTimeout(function() { TP.iconSettingsWindow.toFront(); }, 100); TP.modalWindows.push(settingsWindow); }; TP.get_icon_form_xdata = function(settingsWindow) { var xdata = { general: Ext.getCmp('generalForm').getForm().getValues(), layout: Ext.getCmp('layoutForm').getForm().getValues(), appearance: Ext.getCmp('appearanceForm').getForm().getValues(), link: Ext.getCmp('linkForm').getForm().getValues(), label: Ext.getCmp('labelForm').getForm().getValues() } // clean up if(xdata.label.labeltext == '') { delete xdata.label; } if(xdata.link.link == '') { delete xdata.link; } if(xdata.layout.rotation == 0) { delete xdata.layout.rotation; } Ext.getCmp('appearance_types').store.each(function(data, i) { var t = data.raw[0]; for(var key in xdata.appearance) { var t2 = t; if(t == 'speedometer') { t2 = 'speedo'; } var p = new RegExp('^'+t2, 'g'); if(key.match(p) && t != xdata.appearance.type) { delete xdata.appearance[key]; } } }); if(settingsWindow.panel.hideAppearanceTab) { delete xdata.appearance; } if(settingsWindow.panel.iconType == 'text') { delete xdata.general; } if(xdata.appearance) { delete xdata.appearance.speedoshadow; delete xdata.appearance.pieshadow; } if(xdata.general) { delete xdata.general.newcls; } return(xdata); } TP.openLabelEditorWindow = function(panel) { var oldValue = Ext.getCmp('label_textfield').getValue(); var perf_data = ''; window.perfdata = {}; // ensure fresh and correct performance data panel.setIconLabel(undefined, true); for(var key in perfdata) { delete perfdata[key].perf; delete perfdata[key].key; for(var key2 in perfdata[key]) { var keyname = '.'+key; if(key.match(/[^a-zA-Z]/)) { keyname = '[\''+key+'\']'; } perf_data += '<tr><td><\/td><td><i>perfdata'+keyname+'.'+key2+'<\/i><\/td><td>'+perfdata[key][key2]+'<\/td><\/tr>' } } var labelEditorWindow = new Ext.Window({ height: 500, width: 650, layout: 'fit', title: 'Label Editor', modal: true, buttonAlign: 'center', fbar: [/* panlet setting cancel button */ { xtype: 'button', text: 'cancel', handler: function(This) { var labelEditorWindow = This.up('window'); Ext.getCmp('label_textfield').setValue(oldValue); labelEditorWindow.destroy(); } }, /* panlet setting save button */ { xtype: 'button', text: 'save', handler: function(This) { var labelEditorWindow = This.up('window'); Ext.getCmp('label_textfield').setValue(labelEditorWindow.down('textarea').getValue()) labelEditorWindow.destroy(); } } ], items: [{ xtype: 'form', bodyPadding: 2, border: 0, bodyStyle: 'overflow-y: auto;', submitEmptyText: false, layout: 'anchor', defaults: { width: '99%', labelWidth: 40 }, items: [{ xtype: 'textarea', fieldLabel: 'Label', value: Ext.getCmp('label_textfield').getValue().replace(/<br>/g,"<br>\n"), id: 'label_textfield_edit', height: 90, listeners: { change: function(This) { 
Ext.getCmp('label_textfield').setValue(This.getValue()) } } }, { fieldLabel: 'Help', xtype: 'fieldcontainer', items: [{ xtype: 'label', cls: 'labelhelp', html: '<p>Use HTML to format your label<br>' +'Ex.: <i>Host &lt;b&gt;{{name}}&lt;/b&gt;<\/i>, Newlines: <i>&lt;br&gt;<\/i><\/p>' +'<p>It is possible to create dynamic labels with {{placeholders}}.<br>' +'Ex.: <i>Host {{name}}: {{plugin_output}}<\/i><\/p>' +'<p>You may also do calculations inside placeholders like this:<br>' +'Ex.: <i>Group XY {{totals.ok}}/{{totals.ok + totals.critical + totals.warning + totals.unknown}}<\/i><\/p>' +'<p>use sprintf to format numbers:<br>' +'Ex.: <i>{{sprintf("%.2f %s",perfdata.rta.val, perfdata.rta.unit)}}<\/i><\/p>' +'<p>use strftime to format timestamps:<br>' +'Ex.: <i>{{strftime("%Y-%m-%d",last_check)}}<\/i><\/p>' +'<p>conditionals are possible:<br>' +'Ex.: <i>{{ if(acknowledged) {...} else {...} }}<\/i><\/p>' +'<p>There are different variables available depending on the type of icon/widget:<br>' +'<table><tr><th>Groups/Filters:<\/th><td><i>totals.services.ok<\/i><\/td><td>totals number of ok services<\/td><\/tr>' +'<tr><td><\/td><td><i>totals.services.warning<\/i><\/td><td>totals number of warning services<\/td><\/tr>' +'<tr><td><\/td><td><i>totals.services.critical<\/i><\/td><td>totals number of critical services<\/td><\/tr>' +'<tr><td><\/td><td><i>totals.services.unknown<\/i><\/td><td>totals number of unknown services<\/td><\/tr>' +'<tr><td><\/td><td><i>totals.hosts.up<\/i><\/td><td>totals number of up hosts<\/td><\/tr>' +'<tr><td><\/td><td><i>totals.hosts.down<\/i><\/td><td>totals number of down hosts<\/td><\/tr>' +'<tr><td><\/td><td><i>totals.hosts.unreachable<\/i><\/td><td>totals number of unreachable hosts<\/td><\/tr>' +'<tr><th>Hosts:<\/th><td><i>name<\/i><\/td><td>Hostname<\/td><\/tr>' +'<tr><td><\/td><td><i>state<\/i><\/td><td>State: 0 - Ok, 1 - Warning, 2 - Critical,...<\/td><\/tr>' +'<tr><td><\/td><td><i>performance_data<\/i><\/td><td>Performance data. 
Use list below to access specific values<\/td><\/tr>' +'<tr><td><\/td><td><i>has_been_checked<\/i><\/td><td>Has this host been checked: 0 - No, 1 - Yes<\/td><\/tr>' +'<tr><td><\/td><td><i>scheduled_downtime_depth<\/i><\/td><td>Downtime: 0 - No, &gtl;=1 - Yes<\/td><\/tr>' +'<tr><td><\/td><td><i>acknowledged<\/i><\/td><td>Has this host been acknowledged: 0 - No, 1 - Yes<\/td><\/tr>' +'<tr><td><\/td><td><i>last_check<\/i><\/td><td>Timestamp of last check<\/td><\/tr>' +'<tr><td><\/td><td><i>last_state_change<\/i><\/td><td>Timestamp of last state change<\/td><\/tr>' +'<tr><td><\/td><td><i>last_notification<\/i><\/td><td>Timestamp of last notification<\/td><\/tr>' +'<tr><td><\/td><td><i>plugin_output<\/i><\/td><td>Plugin Output<\/td><\/tr>' +'<tr><th>Services:<\/th><td><i>host_name<\/i><\/td><td>Hostname<\/td><\/tr>' +'<tr><td><\/td><td><i>description<\/i><\/td><td>Servicename<\/td><\/tr>' +'<tr><td><\/td><td colspan=2>(other attributes are identical to hosts)<\/td><\/tr>' +'<tr><th>Performance Data:<\/th><td colspan=2>(available performance data with their current values)<\/td><\/tr>' +perf_data +'<tr><th>Availability Data:<\/th><td colspan=2><\/td><\/tr>' +'<tr><td><\/td><td><i>{{ sprintf("%.2f", availability({d: "60m"})) }}%<\/i><\/td><td>availability for the last 60 minutes<\/td><\/tr>' +'<tr><td><\/td><td><i>{{ sprintf("%.2f", availability({d: "24h"})) }}%<\/i><\/td><td>availability for the last 24 hours<\/td><\/tr>' +'<tr><td><\/td><td><i>{{ sprintf("%.2f", availability({d: "7d"})) }}%<\/i><\/td><td>availability for the last 7 days<\/td><\/tr>' +'<tr><td><\/td><td><i>{{ sprintf("%.2f", availability({d: "31d"})) }}%<\/i><\/td><td>availability for the last 31 days<\/td><\/tr>' +'<tr><td><\/td><td colspan=2><i>{{ sprintf("%.2f", availability({d: "24h", tm: "5x8"})) }}%<\/i><\/td><\/tr>' +'<tr><td><\/td><td><\/td><td>availability for the last 24 hours within given timeperiod<\/td><\/tr>' +'<\/table>', listeners: { afterrender: function(This) { var examples = This.el.dom.getElementsByTagName('i'); Ext.Array.each(examples, function(el, i) { el.className = "clickable"; el.onclick = function(i) { var cur = Ext.getCmp('label_textfield_edit').getValue(); var val = Ext.htmlDecode(el.innerHTML); if(!val.match(/\{\{.*?\}\}/) && (val.match(/^perfdata\./) || val.match(/^perfdata\[/) || val.match(/^totals\./) || val.match(/^avail\./) || val.match(/^[a-z_]+$/))) { val = '{{'+val+'}}'; } if(val.match(/<br>/)) { val += "\n"; } Ext.getCmp('label_textfield_edit').setValue(cur+val); Ext.getCmp('label_textfield_edit').up('form').body.dom.scrollTop=0; Ext.getCmp('label_textfield_edit').focus(); } }); } } }] }] }] }).show(); Ext.getCmp('label_textfield').setValue(" "); Ext.getCmp('label_textfield').setValue(Ext.getCmp('label_textfield_edit').getValue()); TP.modalWindows.push(labelEditorWindow); labelEditorWindow.toFront(); } TP.setIconSettingsValues = function(xdata) { xdata = TP.clone(xdata); // set some defaults if(!xdata.label) { xdata.label = { labeltext: '' }; } if(!xdata.label.fontsize) { xdata.label.fontsize = 14; } if(!xdata.label.bordersize) { xdata.label.bordersize = 1; } Ext.getCmp('generalForm').getForm().setValues(xdata.general); Ext.getCmp('layoutForm').getForm().setValues(xdata.layout); Ext.getCmp('appearanceForm').getForm().setValues(xdata.appearance); Ext.getCmp('linkForm').getForm().setValues(xdata.link); Ext.getCmp('labelForm').getForm().setValues(xdata.label); } TP.getNextToPanelPos = function(panel, width, height) { if(!panel || !panel.el) { return([0,0]); } var sizes = []; 
sizes.push(panel.getSize().width); if(panel.labelEl) { sizes.push(panel.labelEl.getSize().width); } sizes.push(180); // max size of new speedos var offsetLeft = 30; var offsetRight = Ext.Array.max(sizes) + 10; var offsetY = 40; var panelPos = panel.getPosition(); var viewPortSize = TP.viewport.getSize(); if(viewPortSize.width > panelPos[0] + width+offsetRight) { panelPos[0] = panelPos[0] + offsetRight; } else { panelPos[0] = panelPos[0] - width - offsetLeft; } if(panelPos[1] - 50 < 0) { panelPos[1] = offsetY; } else if(viewPortSize.height > panelPos[1] + height - offsetY) { panelPos[1] = panelPos[1] - offsetY; } else { panelPos[1] = viewPortSize.height - height - offsetY; } // make sure its on the screen if(panelPos[0] < 0) { panelPos[0] = 0; } if(panelPos[1] < 20) { panelPos[1] = 20; } return(panelPos); }<|fim▁end|>
f.hide(); } } });
<|file_name|>lfu_item.go<|end_file_name|><|fim▁begin|>package lfu

type lfuItem struct {
	data interface{}<|fim▁hole|>
func newlfuItem(data interface{}, parent *freqNode) *lfuItem {
	return &lfuItem{
		data: data,
		parent: parent,
	}
}<|fim▁end|>
	parent *freqNode
}
<|file_name|>processors.py<|end_file_name|><|fim▁begin|>try:
    from calais import Calais
except ImportError:  # pragma: no cover
    Calais = None  # NOQA

if Calais is not None:
    def process_calais(content, key):
        calais = Calais(key)<|fim▁hole|>
        people = [entity["name"] for entity in getattr(response, "entities", []) if entity["_type"] == "Person"]
        return {"people": people}<|fim▁end|>
response = calais.analyze(content)
<|file_name|>test_mistral_v2_policy.py<|end_file_name|><|fim▁begin|># Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.<|fim▁hole|> import mock import yaml from mistralclient.api.v2 import action_executions from mistralclient.api.v2 import executions from mistralclient.api.v2 import workflows from oslo_config import cfg # XXX: actionsensor import depends on config being setup. import st2tests.config as tests_config tests_config.parse_args() from mistral_v2 import MistralRunner import st2common from st2common.bootstrap import actionsregistrar from st2common.bootstrap import policiesregistrar from st2common.bootstrap import runnersregistrar from st2common.constants import action as action_constants from st2common.models.db.liveaction import LiveActionDB from st2common.persistence.liveaction import LiveAction from st2common.persistence.policy import Policy from st2common.runners import base as runners from st2common.services import action as action_service from st2common.transport.liveaction import LiveActionPublisher from st2common.transport.publishers import CUDPublisher from st2common.util import loader from st2tests import DbTestCase from st2tests import fixturesloader from st2tests.mocks.liveaction import MockLiveActionPublisher MISTRAL_RUNNER_NAME = 'mistral_v2' TEST_PACK = 'mistral_tests' TEST_PACK_PATH = fixturesloader.get_fixtures_packs_base_path() + '/' + TEST_PACK PACKS = [ TEST_PACK_PATH, fixturesloader.get_fixtures_packs_base_path() + '/core' ] # Non-workbook with a single workflow WF1_META_FILE_NAME = 'workflow_v2.yaml' WF1_META_FILE_PATH = TEST_PACK_PATH + '/actions/' + WF1_META_FILE_NAME WF1_META_CONTENT = loader.load_meta_file(WF1_META_FILE_PATH) WF1_NAME = WF1_META_CONTENT['pack'] + '.' 
+ WF1_META_CONTENT['name'] WF1_ENTRY_POINT = TEST_PACK_PATH + '/actions/' + WF1_META_CONTENT['entry_point'] WF1_ENTRY_POINT_X = WF1_ENTRY_POINT.replace(WF1_META_FILE_NAME, 'xformed_' + WF1_META_FILE_NAME) WF1_SPEC = yaml.safe_load(MistralRunner.get_workflow_definition(WF1_ENTRY_POINT_X)) WF1_YAML = yaml.safe_dump(WF1_SPEC, default_flow_style=False) WF1 = workflows.Workflow(None, {'name': WF1_NAME, 'definition': WF1_YAML}) MISTRAL_EXECUTION = {'id': str(uuid.uuid4()), 'state': 'RUNNING', 'workflow_name': WF1_NAME} WF1_EXEC = copy.deepcopy(MISTRAL_EXECUTION) @mock.patch.object( CUDPublisher, 'publish_update', mock.MagicMock(return_value=None)) @mock.patch.object( CUDPublisher, 'publish_create', mock.MagicMock(side_effect=MockLiveActionPublisher.publish_create)) @mock.patch.object( LiveActionPublisher, 'publish_state', mock.MagicMock(side_effect=MockLiveActionPublisher.publish_state)) class MistralRunnerPolicyTest(DbTestCase): @classmethod def setUpClass(cls): super(MistralRunnerPolicyTest, cls).setUpClass() # Override the retry configuration here otherwise st2tests.config.parse_args # in DbTestCase.setUpClass will reset these overrides. cfg.CONF.set_override('retry_exp_msec', 100, group='mistral') cfg.CONF.set_override('retry_exp_max_msec', 200, group='mistral') cfg.CONF.set_override('retry_stop_max_msec', 200, group='mistral') cfg.CONF.set_override('api_url', 'http://0.0.0.0:9101', group='auth') def setUp(self): super(MistralRunnerPolicyTest, self).setUp() # Start with a clean database for each test. self._establish_connection_and_re_create_db() # Register runners. runnersregistrar.register_runners() actions_registrar = actionsregistrar.ActionsRegistrar( use_pack_cache=False, fail_on_failure=True ) for pack in PACKS: actions_registrar.register_from_pack(pack) # Register policies required for the tests. policiesregistrar.register_policy_types(st2common) policies_registrar = policiesregistrar.PolicyRegistrar( use_pack_cache=False, fail_on_failure=True ) for pack in PACKS: policies_registrar.register_from_pack(pack) @classmethod def get_runner_class(cls, runner_name): return runners.get_runner(runner_name).__class__ def _drop_all_other_policies(self, test_policy): policy_dbs = [policy_db for policy_db in Policy.get_all() if policy_db.ref != test_policy] for policy_db in policy_dbs: Policy.delete(policy_db, publish=False) @mock.patch.object( workflows.WorkflowManager, 'list', mock.MagicMock(return_value=[])) @mock.patch.object( workflows.WorkflowManager, 'get', mock.MagicMock(return_value=WF1)) @mock.patch.object( workflows.WorkflowManager, 'create', mock.MagicMock(return_value=[WF1])) @mock.patch.object( executions.ExecutionManager, 'create', mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC))) @mock.patch.object( action_executions.ActionExecutionManager, 'update', mock.MagicMock(return_value=None)) def test_cancel_on_task_action_concurrency(self): # Delete other policies in the test pack to avoid conflicts. required_policy = 'mistral_tests.cancel_on_concurrency' self._drop_all_other_policies(required_policy) # Get threshold from the policy. policy = Policy.get_by_ref(required_policy) threshold = policy.parameters.get('threshold', 0) self.assertGreater(threshold, 0) # Launch instances of the workflow up to threshold. 
for i in range(0, threshold): liveaction = LiveActionDB(action=WF1_NAME, parameters={'friend': 'friend' + str(i)}) liveaction, execution1 = action_service.request(liveaction) liveaction = LiveAction.get_by_id(str(liveaction.id)) self.assertEqual(liveaction.status, action_constants.LIVEACTION_STATUS_RUNNING) # Check number of running instances running = LiveAction.count( action=WF1_NAME, status=action_constants.LIVEACTION_STATUS_RUNNING) self.assertEqual(running, threshold) # Mock the mistral runner cancel method to assert cancel is called. mistral_runner_cls = self.get_runner_class('mistral_v2') with mock.patch.object(mistral_runner_cls, 'cancel', mock.MagicMock(return_value=None)): # Launch another instance of the workflow with mistral callback defined # to indicate that this is executed under a workflow. callback = { 'source': MISTRAL_RUNNER_NAME, 'url': 'http://127.0.0.1:8989/v2/action_executions/12345' } params = {'friend': 'grande animalerie'} liveaction2 = LiveActionDB(action=WF1_NAME, parameters=params, callback=callback) liveaction2, execution2 = action_service.request(liveaction2) action_executions.ActionExecutionManager.update.assert_called_once_with( '12345', output='{"error": "Execution canceled by user."}', state='CANCELLED' ) liveaction2 = LiveAction.get_by_id(str(liveaction2.id)) self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_CANCELED) # Assert cancel has been called. mistral_runner_cls.cancel.assert_called_once_with() @mock.patch.object( workflows.WorkflowManager, 'list', mock.MagicMock(return_value=[])) @mock.patch.object( workflows.WorkflowManager, 'get', mock.MagicMock(return_value=WF1)) @mock.patch.object( workflows.WorkflowManager, 'create', mock.MagicMock(return_value=[WF1])) @mock.patch.object( executions.ExecutionManager, 'create', mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC))) @mock.patch.object( action_executions.ActionExecutionManager, 'update', mock.MagicMock(return_value=None)) def test_cancel_on_task_action_concurrency_by_attr(self): # Delete other policies in the test pack to avoid conflicts. required_policy = 'mistral_tests.cancel_on_concurrency_by_attr' self._drop_all_other_policies(required_policy) # Get threshold from the policy. policy = Policy.get_by_ref(required_policy) threshold = policy.parameters.get('threshold', 0) self.assertGreater(threshold, 0) params = {'friend': 'grande animalerie'} # Launch instances of the workflow up to threshold. for i in range(0, threshold): liveaction = LiveActionDB(action=WF1_NAME, parameters=params) liveaction, execution1 = action_service.request(liveaction) liveaction = LiveAction.get_by_id(str(liveaction.id)) self.assertEqual(liveaction.status, action_constants.LIVEACTION_STATUS_RUNNING) # Check number of running instances running = LiveAction.count( action=WF1_NAME, status=action_constants.LIVEACTION_STATUS_RUNNING, parameters__friend=params['friend']) self.assertEqual(running, threshold) # Mock the mistral runner cancel method to assert cancel is called. mistral_runner_cls = self.get_runner_class('mistral_v2') with mock.patch.object(mistral_runner_cls, 'cancel', mock.MagicMock(return_value=None)): # Launch another instance of the workflow with mistral callback defined # to indicate that this is executed under a workflow. 
callback = { 'source': MISTRAL_RUNNER_NAME, 'url': 'http://127.0.0.1:8989/v2/action_executions/12345' } liveaction2 = LiveActionDB(action=WF1_NAME, parameters=params, callback=callback) liveaction2, execution2 = action_service.request(liveaction2) action_executions.ActionExecutionManager.update.assert_called_once_with( '12345', output='{"error": "Execution canceled by user."}', state='CANCELLED' ) liveaction2 = LiveAction.get_by_id(str(liveaction2.id)) self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_CANCELED) # Assert cancel has been called. mistral_runner_cls.cancel.assert_called_once_with()<|fim▁end|>
import copy
import uuid
<|file_name|>_Cisco_IOS_XR_infra_objmgr_oper.py<|end_file_name|><|fim▁begin|>import re import collections from enum import Enum from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION, ANYXML_CLASS from ydk.errors import YPYError, YPYModelError from ydk.providers._importer import _yang_ns _meta_table = { 'EndPortEnum' : _MetaInfoEnum('EndPortEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', { 'echo':'echo', 'discard':'discard', 'daytime':'daytime', 'chargen':'chargen', 'ftp-data':'ftp_data', 'ftp':'ftp', 'ssh':'ssh', 'telnet':'telnet', 'smtp':'smtp', 'time':'time', 'nicname':'nicname', 'tacacs':'tacacs', 'domain':'domain', 'gopher':'gopher', 'finger':'finger', 'www':'www', 'host-name':'host_name', 'pop2':'pop2', 'pop3':'pop3', 'sun-rpc':'sun_rpc', 'ident':'ident', 'nntp':'nntp', 'bgp':'bgp', 'irc':'irc', 'pim-auto-rp':'pim_auto_rp', 'exec':'exec_', 'login':'login', 'cmd':'cmd', 'lpd':'lpd', 'uucp':'uucp', 'klogin':'klogin', 'kshell':'kshell', 'talk':'talk', 'ldp':'ldp', }, 'Cisco-IOS-XR-infra-objmgr-oper', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper']), 'PortEnum' : _MetaInfoEnum('PortEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', { 'echo':'echo', 'discard':'discard', 'daytime':'daytime', 'chargen':'chargen', 'ftp-data':'ftp_data', 'ftp':'ftp', 'ssh':'ssh', 'telnet':'telnet', 'smtp':'smtp', 'time':'time', 'nicname':'nicname', 'tacacs':'tacacs', 'domain':'domain', 'gopher':'gopher', 'finger':'finger', 'www':'www', 'host-name':'host_name', 'pop2':'pop2', 'pop3':'pop3', 'sun-rpc':'sun_rpc', 'ident':'ident', 'nntp':'nntp', 'bgp':'bgp', 'irc':'irc', 'pim-auto-rp':'pim_auto_rp', 'exec':'exec_', 'login':'login', 'cmd':'cmd', 'lpd':'lpd', 'uucp':'uucp', 'klogin':'klogin', 'kshell':'kshell', 'talk':'talk', 'ldp':'ldp', }, 'Cisco-IOS-XR-infra-objmgr-oper', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper']), 'PortOperatorEnum' : _MetaInfoEnum('PortOperatorEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', { 'equal':'equal', 'not-equal':'not_equal', 'greater-than':'greater_than', 'less-than':'less_than', }, 'Cisco-IOS-XR-infra-objmgr-oper', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper']), 'StartPortEnum' : _MetaInfoEnum('StartPortEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', { 'echo':'echo', 'discard':'discard', 'daytime':'daytime', 'chargen':'chargen', 'ftp-data':'ftp_data', 'ftp':'ftp', 'ssh':'ssh', 'telnet':'telnet', 'smtp':'smtp', 'time':'time', 'nicname':'nicname', 'tacacs':'tacacs', 'domain':'domain', 'gopher':'gopher', 'finger':'finger', 'www':'www', 'host-name':'host_name', 'pop2':'pop2', 'pop3':'pop3', 'sun-rpc':'sun_rpc', 'ident':'ident', 'nntp':'nntp', 'bgp':'bgp', 'irc':'irc', 'pim-auto-rp':'pim_auto_rp', 'exec':'exec_', 'login':'login', 'cmd':'cmd', 'lpd':'lpd', 'uucp':'uucp', 'klogin':'klogin', 'kshell':'kshell', 'talk':'talk', 'ldp':'ldp', }, 'Cisco-IOS-XR-infra-objmgr-oper', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper']), 'ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup', False, [ _MetaInfoClassMember('nested-group-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' Nested object group ''', 
'nested_group_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('nested-group-name-xr', ATTRIBUTE, 'str' , None, None, [], [], ''' Nested group ''', 'nested_group_name_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'nested-group', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects.Object.NestedGroups' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.NestedGroups', False, [ _MetaInfoClassMember('nested-group', REFERENCE_LIST, 'NestedGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup', [], [], ''' nested object group ''', 'nested_group', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'nested-groups', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects.Object.Operators.Operator' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.Operators.Operator', False, [ _MetaInfoClassMember('operator-type', REFERENCE_ENUM_CLASS, 'PortOperatorEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'PortOperatorEnum', [], [], ''' operation for ports ''', 'operator_type', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('operator-type-xr', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Operator<|fim▁hole|> ''', 'operator_type_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('port', REFERENCE_UNION, 'str' , None, None, [], [], ''' Port number ''', 'port', 'Cisco-IOS-XR-infra-objmgr-oper', False, [ _MetaInfoClassMember('port', REFERENCE_ENUM_CLASS, 'PortEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'PortEnum', [], [], ''' Port number ''', 'port', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('port', ATTRIBUTE, 'int' , None, None, [('0', '65535')], [], ''' Port number ''', 'port', 'Cisco-IOS-XR-infra-objmgr-oper', False), ]), _MetaInfoClassMember('port-xr', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Port ''', 'port_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'operator', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects.Object.Operators' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.Operators', False, [ _MetaInfoClassMember('operator', REFERENCE_LIST, 'Operator' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object.Operators.Operator', [], [], ''' op class ''', 'operator', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'operators', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects.Object.PortRanges.PortRange' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.PortRanges.PortRange', False, [ _MetaInfoClassMember('end-port', REFERENCE_UNION, 'str' , None, None, [], [], ''' End port number ''', 'end_port', 'Cisco-IOS-XR-infra-objmgr-oper', False, [ _MetaInfoClassMember('end-port', REFERENCE_ENUM_CLASS, 'EndPortEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'EndPortEnum', [], [], ''' End port number ''', 'end_port', 'Cisco-IOS-XR-infra-objmgr-oper', False), 
_MetaInfoClassMember('end-port', ATTRIBUTE, 'int' , None, None, [('0', '65535')], [], ''' End port number ''', 'end_port', 'Cisco-IOS-XR-infra-objmgr-oper', False), ]), _MetaInfoClassMember('end-port-xr', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Port end address ''', 'end_port_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('start-port', REFERENCE_UNION, 'str' , None, None, [], [], ''' Start port number ''', 'start_port', 'Cisco-IOS-XR-infra-objmgr-oper', False, [ _MetaInfoClassMember('start-port', REFERENCE_ENUM_CLASS, 'StartPortEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'StartPortEnum', [], [], ''' Start port number ''', 'start_port', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('start-port', ATTRIBUTE, 'int' , None, None, [('0', '65535')], [], ''' Start port number ''', 'start_port', 'Cisco-IOS-XR-infra-objmgr-oper', False), ]), _MetaInfoClassMember('start-port-xr', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Port start address ''', 'start_port_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'port-range', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects.Object.PortRanges' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.PortRanges', False, [ _MetaInfoClassMember('port-range', REFERENCE_LIST, 'PortRange' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object.PortRanges.PortRange', [], [], ''' Match only packets on a given port range ''', 'port_range', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'port-ranges', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup', False, [ _MetaInfoClassMember('parent-group-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' Nested object group ''', 'parent_group_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('parent-name', ATTRIBUTE, 'str' , None, None, [], [], ''' Parent node ''', 'parent_name', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'parent-group', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects.Object.ParentGroups' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object.ParentGroups', False, [ _MetaInfoClassMember('parent-group', REFERENCE_LIST, 'ParentGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup', [], [], ''' Parent object group ''', 'parent_group', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'parent-groups', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects.Object' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects.Object', False, [ _MetaInfoClassMember('object-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' Port object group name ''', 'object_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('nested-groups', REFERENCE_CLASS, 'NestedGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 
'ObjectGroup.Port.Objects.Object.NestedGroups', [], [], ''' Table of NestedGroup ''', 'nested_groups', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('operators', REFERENCE_CLASS, 'Operators' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object.Operators', [], [], ''' Table of Operator ''', 'operators', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('parent-groups', REFERENCE_CLASS, 'ParentGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object.ParentGroups', [], [], ''' Table of ParentGroup ''', 'parent_groups', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('port-ranges', REFERENCE_CLASS, 'PortRanges' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object.PortRanges', [], [], ''' Table of PortRange ''', 'port_ranges', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'object', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port.Objects' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port.Objects', False, [ _MetaInfoClassMember('object', REFERENCE_LIST, 'Object' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects.Object', [], [], ''' Port object group ''', 'object', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'objects', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Port' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Port', False, [ _MetaInfoClassMember('objects', REFERENCE_CLASS, 'Objects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port.Objects', [], [], ''' Table of Object ''', 'objects', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'port', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup', False, [ _MetaInfoClassMember('nested-group-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' Enter the name of a nested object group ''', 'nested_group_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('nested-group-name-xr', ATTRIBUTE, 'str' , None, None, [], [], ''' Nested group ''', 'nested_group_name_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'nested-group', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups', False, [ _MetaInfoClassMember('nested-group', REFERENCE_LIST, 'NestedGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup', [], [], ''' nested object group ''', 'nested_group', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'nested-groups', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address' : { 'meta_info' : 
_MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address', False, [ _MetaInfoClassMember('prefix', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 prefix x:x::x/y ''', 'prefix', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('prefix-length', ATTRIBUTE, 'int' , None, None, [('0', '128')], [], ''' Prefix of the IP Address ''', 'prefix_length', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('prefix-length-xr', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Prefix length ''', 'prefix_length_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('prefix-xr', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv4 Address ''', 'prefix_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'address', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.Addresses' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.Addresses', False, [ _MetaInfoClassMember('address', REFERENCE_LIST, 'Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address', [], [], ''' IPv6 address ''', 'address', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'addresses', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange', False, [ _MetaInfoClassMember('end-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 address ''', 'end_address', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('end-address-xr', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' Range end address ''', 'end_address_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('start-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' IPv6 address ''', 'start_address', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('start-address-xr', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' Range start address ''', 'start_address_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 
'Cisco-IOS-XR-infra-objmgr-oper', 'address-range', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges', False, [ _MetaInfoClassMember('address-range', REFERENCE_LIST, 'AddressRange' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange', [], [], ''' Range of host addresses ''', 'address_range', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'address-ranges', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup', False, [ _MetaInfoClassMember('parent-group-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' Nested object group ''', 'parent_group_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('parent-name', ATTRIBUTE, 'str' , None, None, [], [], ''' Parent node ''', 'parent_name', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'parent-group', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups', False, [ _MetaInfoClassMember('parent-group', REFERENCE_LIST, 'ParentGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup', [], [], ''' Parent object group ''', 'parent_group', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'parent-groups', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host', False, [ _MetaInfoClassMember('host-address', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' host ipv6 address ''', 'host_address', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('host-address-xr', ATTRIBUTE, 'str' , None, None, [], [b'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'], ''' Host address ''', 'host_address_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'host', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object.Hosts' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object.Hosts', False, [ _MetaInfoClassMember('host', REFERENCE_LIST, 'Host' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host', [], [], ''' A single host address ''', 'host', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 
'Cisco-IOS-XR-infra-objmgr-oper', 'hosts', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects.Object' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects.Object', False, [ _MetaInfoClassMember('object-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' IPv6 object group name - maximum 64 characters ''', 'object_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('address-ranges', REFERENCE_CLASS, 'AddressRanges' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges', [], [], ''' Table of AddressRange ''', 'address_ranges', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('addresses', REFERENCE_CLASS, 'Addresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.Addresses', [], [], ''' Table of Address ''', 'addresses', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('hosts', REFERENCE_CLASS, 'Hosts' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.Hosts', [], [], ''' Table of Host ''', 'hosts', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('nested-groups', REFERENCE_CLASS, 'NestedGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups', [], [], ''' Table of NestedGroup ''', 'nested_groups', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('parent-groups', REFERENCE_CLASS, 'ParentGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups', [], [], ''' Table of parent object group ''', 'parent_groups', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'object', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6.Objects' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6.Objects', False, [ _MetaInfoClassMember('object', REFERENCE_LIST, 'Object' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects.Object', [], [], ''' IPv6 object group ''', 'object', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'objects', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv6' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv6', False, [ _MetaInfoClassMember('objects', REFERENCE_CLASS, 'Objects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6.Objects', [], [], ''' Table of Object ''', 'objects', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'ipv6', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup', False, [ _MetaInfoClassMember('nested-group-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' Nested object group ''', 'nested_group_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('nested-group-name-xr', ATTRIBUTE, 'str' , None, None, [], [], ''' Nested group ''', 'nested_group_name_xr', 
'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'nested-group', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups', False, [ _MetaInfoClassMember('nested-group', REFERENCE_LIST, 'NestedGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup', [], [], ''' Nested object group ''', 'nested_group', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'nested-groups', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address', False, [ _MetaInfoClassMember('prefix', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address/prefix ''', 'prefix', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('prefix-length', ATTRIBUTE, 'int' , None, None, [('0', '32')], [], ''' Prefix of the IP Address ''', 'prefix_length', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('prefix-length-xr', ATTRIBUTE, 'int' , None, None, [('0', '4294967295')], [], ''' Prefix length ''', 'prefix_length_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('prefix-xr', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 Address ''', 'prefix_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'address', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.Addresses' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.Addresses', False, [ _MetaInfoClassMember('address', REFERENCE_LIST, 'Address' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address', [], [], ''' IPv4 address ''', 'address', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'addresses', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange', False, [ _MetaInfoClassMember('end-address', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address ''', 'end_address', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('end-address-xr', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' Range end address ''', 'end_address_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('start-address', ATTRIBUTE, 'str' , None, None, [], 
[b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' IPv4 address ''', 'start_address', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('start-address-xr', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' Range start address ''', 'start_address_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'address-range', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges', False, [ _MetaInfoClassMember('address-range', REFERENCE_LIST, 'AddressRange' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange', [], [], ''' Range of host addresses ''', 'address_range', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'address-ranges', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup', False, [ _MetaInfoClassMember('parent-group-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' Nested object group ''', 'parent_group_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('parent-name', ATTRIBUTE, 'str' , None, None, [], [], ''' Parent node ''', 'parent_name', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'parent-group', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups', False, [ _MetaInfoClassMember('parent-group', REFERENCE_LIST, 'ParentGroup' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup', [], [], ''' Parent object group ''', 'parent_group', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'parent-groups', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host', False, [ _MetaInfoClassMember('host-address', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' Host ipv4 address ''', 'host_address', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('host-address-xr', ATTRIBUTE, 'str' , None, None, [], [b'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'], ''' Host address ''', 'host_address_xr', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'host', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 
'ObjectGroup.Network.Ipv4.Objects.Object.Hosts' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object.Hosts', False, [ _MetaInfoClassMember('host', REFERENCE_LIST, 'Host' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host', [], [], ''' A single host address ''', 'host', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'hosts', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects.Object' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects.Object', False, [ _MetaInfoClassMember('object-name', ATTRIBUTE, 'str' , None, None, [(1, 64)], [], ''' IPv4 object group name - maximum 64 characters ''', 'object_name', 'Cisco-IOS-XR-infra-objmgr-oper', True), _MetaInfoClassMember('address-ranges', REFERENCE_CLASS, 'AddressRanges' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges', [], [], ''' Table of AddressRange ''', 'address_ranges', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('addresses', REFERENCE_CLASS, 'Addresses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.Addresses', [], [], ''' Table of Address ''', 'addresses', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('hosts', REFERENCE_CLASS, 'Hosts' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.Hosts', [], [], ''' Table of Host ''', 'hosts', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('nested-groups', REFERENCE_CLASS, 'NestedGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups', [], [], ''' Table of NestedGroup ''', 'nested_groups', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('parent-groups', REFERENCE_CLASS, 'ParentGroups' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups', [], [], ''' Table of parent object group ''', 'parent_groups', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'object', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4.Objects' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4.Objects', False, [ _MetaInfoClassMember('object', REFERENCE_LIST, 'Object' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects.Object', [], [], ''' IPv4 object group ''', 'object', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'objects', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network.Ipv4' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network.Ipv4', False, [ _MetaInfoClassMember('objects', REFERENCE_CLASS, 'Objects' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4.Objects', [], [], ''' Table of Object ''', 'objects', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'ipv4', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup.Network' : { 'meta_info' : _MetaInfoClass('ObjectGroup.Network', False, [ 
_MetaInfoClassMember('ipv4', REFERENCE_CLASS, 'Ipv4' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv4', [], [], ''' IPv4 object group ''', 'ipv4', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('ipv6', REFERENCE_CLASS, 'Ipv6' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network.Ipv6', [], [], ''' IPv6 object group ''', 'ipv6', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'network', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, 'ObjectGroup' : { 'meta_info' : _MetaInfoClass('ObjectGroup', False, [ _MetaInfoClassMember('network', REFERENCE_CLASS, 'Network' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Network', [], [], ''' Network object group ''', 'network', 'Cisco-IOS-XR-infra-objmgr-oper', False), _MetaInfoClassMember('port', REFERENCE_CLASS, 'Port' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper', 'ObjectGroup.Port', [], [], ''' Port object group ''', 'port', 'Cisco-IOS-XR-infra-objmgr-oper', False), ], 'Cisco-IOS-XR-infra-objmgr-oper', 'object-group', _yang_ns._namespaces['Cisco-IOS-XR-infra-objmgr-oper'], 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_objmgr_oper' ), }, } _meta_table['ObjectGroup.Port.Objects.Object.NestedGroups.NestedGroup']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects.Object.NestedGroups']['meta_info'] _meta_table['ObjectGroup.Port.Objects.Object.Operators.Operator']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects.Object.Operators']['meta_info'] _meta_table['ObjectGroup.Port.Objects.Object.PortRanges.PortRange']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects.Object.PortRanges']['meta_info'] _meta_table['ObjectGroup.Port.Objects.Object.ParentGroups.ParentGroup']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects.Object.ParentGroups']['meta_info'] _meta_table['ObjectGroup.Port.Objects.Object.NestedGroups']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Port.Objects.Object.Operators']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Port.Objects.Object.PortRanges']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Port.Objects.Object.ParentGroups']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Port.Objects.Object']['meta_info'].parent =_meta_table['ObjectGroup.Port.Objects']['meta_info'] _meta_table['ObjectGroup.Port.Objects']['meta_info'].parent =_meta_table['ObjectGroup.Port']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups.NestedGroup']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Addresses.Address']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Addresses']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges.AddressRange']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups.ParentGroup']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups']['meta_info'] 
_meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Hosts.Host']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Hosts']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.NestedGroups']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Addresses']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.AddressRanges']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.ParentGroups']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object.Hosts']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects.Object']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6.Objects']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6.Objects']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv6']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups.NestedGroup']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Addresses.Address']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Addresses']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges.AddressRange']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups.ParentGroup']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Hosts']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.NestedGroups']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Addresses']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.AddressRanges']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.ParentGroups']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object.Hosts']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects.Object']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects.Object']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4.Objects']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4.Objects']['meta_info'].parent =_meta_table['ObjectGroup.Network.Ipv4']['meta_info'] _meta_table['ObjectGroup.Network.Ipv6']['meta_info'].parent =_meta_table['ObjectGroup.Network']['meta_info'] _meta_table['ObjectGroup.Network.Ipv4']['meta_info'].parent =_meta_table['ObjectGroup.Network']['meta_info'] _meta_table['ObjectGroup.Port']['meta_info'].parent =_meta_table['ObjectGroup']['meta_info'] _meta_table['ObjectGroup.Network']['meta_info'].parent =_meta_table['ObjectGroup']['meta_info']<|fim▁end|>
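The generated _meta_table above only becomes a tree through the trailing .parent assignments, so a node's ancestry can be recovered by following those links. The snippet below is an editorial sketch, not part of the generated module: it assumes _meta_table is visible in scope (for example, imported from this module) and that a meta-info object whose parent was never assigned simply exposes no usable 'parent' attribute, which the getattr default covers.

def meta_ancestry(meta_table, yang_class_path):
    """Yield meta-info objects from the given class path up to the top container."""
    meta = meta_table[yang_class_path]['meta_info']
    seen = set()
    while meta is not None and id(meta) not in seen:  # cycle guard, defensive only
        seen.add(id(meta))
        yield meta
        meta = getattr(meta, 'parent', None)

# Hypothetical use, based on the assignments above:
# meta_ancestry(_meta_table, 'ObjectGroup.Network.Ipv4.Objects.Object.Hosts.Host')
# would yield Host -> Hosts -> Object -> Objects -> Ipv4 -> Network -> ObjectGroup.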
<|file_name|>sf_snapshot_schedule_manager.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # (c) 2017, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' module: sf_snapshot_schedule_manager short_description: Manage SolidFire snapshot schedules extends_documentation_fragment: - netapp.solidfire version_added: '2.3' author: Sumit Kumar ([email protected]) description: - Create, destroy, or update accounts on SolidFire options: state: description: - Whether the specified schedule should exist or not. required: true choices: ['present', 'absent'] paused: description: - Pause / Resume a schedule. required: false recurring: description: - Should the schedule recur? required: false time_interval_days: description: Time interval in days. required: false default: 1 time_interval_hours: description: Time interval in hours. required: false default: 0 time_interval_minutes: description: Time interval in minutes. required: false default: 0 name: description: - Name for the snapshot schedule. required: true snapshot_name: description: - Name for the created snapshots. required: false volumes: description: - Volume IDs that you want to set the snapshot schedule for. - At least 1 volume ID is required for creating a new schedule. - required when C(state=present) required: false retention: description: - Retention period for the snapshot. - Format is 'HH:mm:ss'. required: false schedule_id: description: - The schedule ID for the schedule that you want to update or delete. required: false starting_date: description: - Starting date for the schedule. - Required when C(state=present). - Please use two '-' in the above format, or you may see an error- TypeError, is not JSON serializable description. 
- "Format: C(2016--12--01T00:00:00Z)" required: false ''' EXAMPLES = """ - name: Create Snapshot schedule sf_snapshot_schedule_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: present name: Schedule_A time_interval_days: 1 starting_date: 2016--12--01T00:00:00Z volumes: 7 - name: Update Snapshot schedule sf_snapshot_schedule_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: present schedule_id: 6 recurring: True snapshot_name: AnsibleSnapshots - name: Delete Snapshot schedule sf_snapshot_schedule_manager: hostname: "{{ solidfire_hostname }}" username: "{{ solidfire_username }}" password: "{{ solidfire_password }}" state: absent schedule_id: 6 """ RETURN = """ schedule_id: description: Schedule ID of the newly created schedule returned: success type: string """ import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native import ansible.module_utils.netapp as netapp_utils HAS_SF_SDK = netapp_utils.has_sf_sdk() class SolidFireSnapShotSchedule(object): def __init__(self): self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() self.argument_spec.update(dict( state=dict(required=True, choices=['present', 'absent']), name=dict(required=True, type='str'), time_interval_days=dict(required=False, type='int', default=1), time_interval_hours=dict(required=False, type='int', default=0), time_interval_minutes=dict(required=False, type='int', default=0), paused=dict(required=False, type='bool'), recurring=dict(required=False, type='bool'), starting_date=dict(type='str'), snapshot_name=dict(required=False, type='str'), volumes=dict(required=False, type='list'), retention=dict(required=False, type='str'), schedule_id=dict(type='int'), )) self.module = AnsibleModule( argument_spec=self.argument_spec, required_if=[ ('state', 'present', ['starting_date', 'volumes']) ], supports_check_mode=True ) p = self.module.params # set up state variables self.state = p['state'] self.name = p['name'] # self.interval = p['interval'] self.time_interval_days = p['time_interval_days'] self.time_interval_hours = p['time_interval_hours'] self.time_interval_minutes = p['time_interval_minutes'] self.paused = p['paused'] self.recurring = p['recurring'] self.starting_date = p['starting_date'] if self.starting_date is not None: self.starting_date = self.starting_date.replace("--", "-") self.snapshot_name = p['snapshot_name'] self.volumes = p['volumes'] self.retention = p['retention'] self.schedule_id = p['schedule_id'] self.create_schedule_result = None if HAS_SF_SDK is False: self.module.fail_json(msg="Unable to import the SolidFire Python SDK") else: self.sfe = netapp_utils.create_sf_connection(module=self.module) def get_schedule(self): schedule_list = self.sfe.list_schedules() for schedule in schedule_list.schedules: if schedule.name == self.name: # Update self.schedule_id: if self.schedule_id is not None: if schedule.schedule_id == self.schedule_id: return schedule else: self.schedule_id = schedule.schedule_id return schedule return None def create_schedule(self): try: sched = netapp_utils.Schedule() # if self.interval == 'time_interval': sched.frequency = netapp_utils.TimeIntervalFrequency(days=self.time_interval_days, hours=self.time_interval_hours, minutes=self.time_interval_minutes) # Create schedule sched.name = self.name sched.schedule_info = netapp_utils.ScheduleInfo( volume_ids=self.volumes, 
snapshot_name=self.snapshot_name, retention=self.retention ) sched.paused = self.paused sched.recurring = self.recurring sched.starting_date = self.starting_date self.create_schedule_result = self.sfe.create_schedule(schedule=sched) except Exception as e: self.module.fail_json(msg='Error creating schedule %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def delete_schedule(self): try: get_schedule_result = self.sfe.get_schedule(schedule_id=self.schedule_id) sched = get_schedule_result.schedule sched.to_be_deleted = True self.sfe.modify_schedule(schedule=sched) except Exception as e: self.module.fail_json(msg='Error deleting schedule %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def update_schedule(self): <|fim▁hole|> get_schedule_result = self.sfe.get_schedule(schedule_id=self.schedule_id) sched = get_schedule_result.schedule # Update schedule properties # if self.interval == 'time_interval': temp_frequency = netapp_utils.TimeIntervalFrequency(days=self.time_interval_days, hours=self.time_interval_hours, minutes=self.time_interval_minutes) if sched.frequency.days != temp_frequency.days or \ sched.frequency.hours != temp_frequency.hours \ or sched.frequency.minutes != temp_frequency.minutes: sched.frequency = temp_frequency sched.name = self.name if self.volumes is not None: sched.schedule_info.volume_ids = self.volumes if self.retention is not None: sched.schedule_info.retention = self.retention if self.snapshot_name is not None: sched.schedule_info.snapshot_name = self.snapshot_name if self.paused is not None: sched.paused = self.paused if self.recurring is not None: sched.recurring = self.recurring if self.starting_date is not None: sched.starting_date = self.starting_date # Make API call self.sfe.modify_schedule(schedule=sched) except Exception as e: self.module.fail_json(msg='Error updating schedule %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def apply(self): changed = False schedule_exists = False update_schedule = False schedule_detail = self.get_schedule() if schedule_detail: schedule_exists = True if self.state == 'absent': changed = True elif self.state == 'present': # Check if we need to update the account if self.retention is not None and schedule_detail.schedule_info.retention != self.retention: update_schedule = True changed = True elif schedule_detail.name != self.name: update_schedule = True changed = True elif self.snapshot_name is not None and schedule_detail.schedule_info.snapshot_name != self.snapshot_name: update_schedule = True changed = True elif self.volumes is not None and schedule_detail.schedule_info.volume_ids != self.volumes: update_schedule = True changed = True elif self.paused is not None and schedule_detail.paused != self.paused: update_schedule = True changed = True elif self.recurring is not None and schedule_detail.recurring != self.recurring: update_schedule = True changed = True elif self.starting_date is not None and schedule_detail.starting_date != self.starting_date: update_schedule = True changed = True elif self.time_interval_minutes is not None or self.time_interval_hours is not None \ or self.time_interval_days is not None: temp_frequency = netapp_utils.TimeIntervalFrequency(days=self.time_interval_days, hours=self.time_interval_hours, minutes=self.time_interval_minutes) if schedule_detail.frequency.days != temp_frequency.days or \ schedule_detail.frequency.hours != temp_frequency.hours \ or schedule_detail.frequency.minutes != temp_frequency.minutes: update_schedule = True 
changed = True else: if self.state == 'present': changed = True if changed: if self.module.check_mode: # Skip changes pass else: if self.state == 'present': if not schedule_exists: self.create_schedule() elif update_schedule: self.update_schedule() elif self.state == 'absent': self.delete_schedule() if self.create_schedule_result is not None: self.module.exit_json(changed=changed, schedule_id=self.create_schedule_result.schedule_id) else: self.module.exit_json(changed=changed) def main(): v = SolidFireSnapShotSchedule() v.apply() if __name__ == '__main__': main()<|fim▁end|>
try:
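One detail of the module above worth spelling out: its DOCUMENTATION block asks playbook authors to write starting_date with doubled dashes (2016--12--01T00:00:00Z) to avoid the "TypeError, is not JSON serializable" issue it mentions, and __init__ simply collapses the doubled dashes again. A tiny illustrative sketch, not part of the module:

raw_value = "2016--12--01T00:00:00Z"        # value as written in a playbook task
normalized = raw_value.replace("--", "-")   # what __init__ stores and later passes to the schedule
assert normalized == "2016-12-01T00:00:00Z"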
<|file_name|>p486.cpp<|end_file_name|><|fim▁begin|>//BE NAME DOOST #include <iostream> #include <string> #include <vector> #include <algorithm> #include <cmath> #include <set> #include <iostream> using namespace std; int main() { string a,b; cin>>a>>b; set <char> d; for(int i=0;i<a.size();i++) d.insert(a[i]); int B=0,C=0; for(int i=0;i<b.size();i++) { if(a[i]==b[i]) B++; else<|fim▁hole|> if(s==tmp.size()) C++; } } cout<<B<<" "<<C; }<|fim▁end|>
{ set <char> tmp=d; int s=tmp.size(); tmp.insert(b[i]);
<|file_name|>webpack.test.config.js<|end_file_name|><|fim▁begin|>var path = require("path"); module.exports = { watch: false, module: { loaders: [ { test: /\.jsx?$/, exclude: /node_modules/, loader: 'babel-loader' } ], postLoaders: [{ // test: /\.js$/, exclude: /(test|node_modules|bower_components|test_helpers)\//, loader: 'istanbul-instrumenter' }], preLoaders: [ { test: /\.js$/, include: [ path.resolve(__dirname, "src") ], loader: "eslint-loader" }<|fim▁hole|> root: [path.join(__dirname, 'src/main'), path.join(__dirname, 'src/test_helpers')] } };<|fim▁end|>
] }, resolve: { // add bower components and main source to resolved
<|file_name|>trade.component.ts<|end_file_name|><|fim▁begin|>import { Component } from "@angular/core"; import { Router } from "@angular/router"; import { Order } from "classes-common/order"; import { AccountService } from "services/account.service"; import { promiseError } from "utils/utils"; @Component({ moduleId: module.id, templateUrl: "trade.component.html", }) export class TradeComponent { private order: Order = { id: null, user_id: null, quantity: null, listing_id: null, action: null, type: null, price: null, status: null, }; private submitFailed: boolean = false; constructor(private accountService: AccountService, private router: Router) {} submit() { this.accountService .createOrder(this.order) .then(() => this.router.navigate(["/orders"])) .catch(promiseError)<|fim▁hole|><|fim▁end|>
.catch(() => this.submitFailed = true); } };
<|file_name|>mips.rs<|end_file_name|><|fim▁begin|>//! Run-time feature detection for MIPS on Linux. use crate::detect::{Feature, cache, bit}; use super::auxvec; /// Performs run-time feature detection. #[inline] pub fn check_for(x: Feature) -> bool { cache::test(x as u32, detect_features) } /// Try to read the features from the auxiliary vector, and if that fails, try /// to read them from `/proc/cpuinfo`. fn detect_features() -> cache::Initializer { let mut value = cache::Initializer::default(); let enable_feature = |value: &mut cache::Initializer, f, enable| { if enable { value.set(f as u32); } }; // The values are part of the platform-specific [asm/hwcap.h][hwcap]<|fim▁hole|> // // [hwcap]: https://github.com/torvalds/linux/blob/master/arch/arm64/include/uapi/asm/hwcap.h if let Ok(auxv) = auxvec::auxv() { enable_feature(&mut value, Feature::msa, bit::test(auxv.hwcap, 1)); return value; } // TODO: fall back via `cpuinfo`. value }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from .make_haploblocks import get_haploblocks from .genetic_models import check_genetic_models from .model_score import get_model_score from .fix_variant import make_print_version from .variant_annotator import VariantAnnotator<|fim▁end|>
from __future__ import absolute_import
<|file_name|>octopi_ro.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ro" version="2.0"> <context> <name>MainWindow</name> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="14"/> <source>Octopi</source> <translation>Octopi</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="110"/> <source>Info</source> <translation>Info</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="115"/> <source>Files</source> <translation>Fișiere</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="120"/> <location filename="Projects/octopi/ui/mainwindow.ui" line="164"/> <source>Transaction</source> <translation>Tranzacție</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="125"/> <source>Output</source> <translation>Rezultat</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="147"/> <source>Help</source> <translation>Ajutor</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="155"/> <source>View</source> <translation>Mod afișare</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="171"/> <source>File</source> <translation>Fișier</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="184"/> <source>Search</source> <translation>Căutare</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="193"/> <source>Tools</source> <translation>Unelte</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="222"/> <location filename="Projects/octopi/ui/mainwindow.ui" line="225"/> <source>About</source> <translation>Despre</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="239"/> <source>Non installed</source> <translation>Neinstalate</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="248"/> <source>Commit</source> <translation>Comite</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="251"/> <source>Ctrl+M</source> <translation>Ctrl+M</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="260"/> <source>Rollback</source> <translation>Anulează</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="263"/> <source>Ctrl+B</source> <translation>Ctrl+B</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="272"/> <source>Sync database</source> <translation>Sincronizează baza de date</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="275"/> <source>Ctrl+D</source> <translation>Ctrl+D</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="284"/> <source>Exit</source> <translation>Ieșire</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="287"/> <source>Ctrl+Q</source> <translation>Ctrl+Q</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="296"/> <source>System upgrade</source> <translation>Actualizare de sistem</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="299"/> <source>Ctrl+U</source> 
<translation>Ctrl+U</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="308"/> <location filename="Projects/octopi/ui/mainwindow.ui" line="311"/> <location filename="Projects/octopi/ui/mainwindow.ui" line="513"/> <source>Install</source> <translation>Instalează</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="320"/> <location filename="Projects/octopi/ui/mainwindow.ui" line="323"/> <source>Remove</source> <translation>Șterge</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="328"/> <source>Outdated packages</source> <translation>Pachete învechite</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="337"/> <source>Get latest distro news</source> <translation>Obține ultimele știri ale distribuției</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="340"/> <source>Ctrl+G</source><|fim▁hole|> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="349"/> <source>Remove group</source> <translation>Șterge grupa</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="358"/> <source>Install group</source> <translation>Instalează grupa</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="367"/> <source>Collapse item</source> <translation>Pliază elementul</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="376"/> <source>Expand item</source> <translation>Desfășoară elementul</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="385"/> <source>Collapse all items</source> <translation>Pliază toate elementele</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="394"/> <source>Expand all items</source> <translation>Desfășoară toate elementele</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="403"/> <source>Open file</source> <translation>Deschide fișier</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="412"/> <source>Edit file</source> <translation>Editează fișier</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="421"/> <source>Open directory</source> <translation>Deschide director</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="430"/> <source>Open terminal here</source> <translation>Deschide un terminal aici</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="439"/> <source>Remove item</source> <translation>Șterge element</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="451"/> <source>Remove items</source> <translation>Șterge elemente</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="462"/> <source>By description</source> <translation>După descriere</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="470"/> <source>By name</source> <translation>După nume</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="479"/> <source>Find a file</source> <translation>Găsește un fișier</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" 
line="488"/> <source>Open root terminal</source> <translation>Deschide terminal rădăcină</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="493"/> <source>Usage</source> <translation>Folosință</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="496"/> <source>F1</source> <translation>F1</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="504"/> <source>Install local package...</source> <translation>Instalează pachet local...</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="521"/> <source>Pacman Log Viewer</source> <translation>Vizualizator istoric Pacman</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="533"/> <source>Repository Editor</source> <translation>Editor depozite</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="541"/> <source>Installed</source> <translation>Instalate</translation> </message> <message> <location filename="Projects/octopi/ui/mainwindow.ui" line="557"/> <source>By file</source> <translation>După fișier</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="55"/> <source>For more information, visit:</source> <translation>Pentru mai multe informații vizitați:</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="58"/> <source>A Qt-based Pacman frontend,</source> <translation>O interfață Pacman bazată pe Qt,</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="59"/> <source>licensed under the terms of</source> <translation>licențiată în termenii</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="77"/> <source>Package classification:</source> <translation>Clasificarea pachetelor:</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="80"/> <source>An installed package</source> <translation>Un pachet instalat</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="82"/> <source>An installed package (not required by others)</source> <translation>Un pachet instalat (ce nu e necesitat de altele)</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="85"/> <source>A foreign package, installed from</source> <translation>Un pachet străin, instalat din</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="88"/> <source>A non installed package</source> <translation>Un pachet neinstalat</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="91"/> <source>An outdated package</source> <translation>Un pachet învechit</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="94"/> <source>An outdated foreign package</source> <translation>Un pachet străin învechit</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="97"/> <source>A newer than repository package</source> <translation>Un pachet mai nou decât în depozit</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="100"/> <source>Basic usage help:</source> <translation>Ajutor de bază:</translation> 
</message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="102"/> <source>Position the mouse over a package to see its description</source> <translation>Poziționați cursorul pe un pachet pentru a-i vedea descrierea</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="104"/> <source>Double click an installed package to see its contents</source> <translation>Dublu-clic pe un pachet instalat pentru a-i vedea conținutul</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="106"/> <source>Right click package to install/reinstall or remove it</source> <translation>Clic-dreapta pe un pachet pentru a-l (dez)instala sau șterge</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="109"/> <source>Alt+key sequences:</source> <translation>Secvențe cu tasta Alt:</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="111"/> <source>Alt+1 to switch to &apos;Info&apos; tab</source> <translation>Alt+1 pentru a trece la fila „Info”</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="113"/> <source>Alt+2 to switch to &apos;Files&apos; tab</source> <translation>Alt+2 pentru a trece la file „Fișiere”</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="115"/> <source>Alt+3 to switch to &apos;Transaction&apos; tab</source> <translation>Alt+3 pentru a trece la fila „Tranzacții”</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="117"/> <source>Alt+4 to switch to &apos;Output&apos; tab</source> <translation>Alt+4 pentru a trece la fila „Rezultat”</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="119"/> <source>Alt+5 to switch to &apos;News&apos; tab</source> <translation>Alt+5 pentru a trece la fila „Știri”</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="121"/> <source>Alt+6 or &apos;F1&apos; to show this help page</source> <translation>Alt+6 sau F1 pentru a reveni aici</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="124"/> <source>Control+key sequences:</source> <translation>Secvențe cu tasta Ctrl:</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="126"/> <source>Ctrl+D or &apos;File/Sync database&apos; to sync the local database with latest remote changes (pacman -Sy)</source> <translation>Ctrl+D sau „Fișier/Sincronizează baza de date” pentru a sincroniza baza de date locală cu ultimele schimbări de la distanță (pacman -Sy)</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="128"/> <source>Ctrl+U or &apos;File/System upgrade&apos; to make a full system upgrade (pacman -Su)</source> <translation>Ctrl+U sau „Fișier/Actualizare de sistem” pentru a face o actualizare de sistem completă (pacman -Su)</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="130"/> <source>Ctrl+L to find a package in the package list</source> <translation>Ctrl+L pentru a găsi un pachet în lista de pachete</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="132"/> <source>Ctrl+F to search for text inside tab Files, News and 
Usage</source> <translation>Ctrl+F pentru a căuta text în interiorul taburilor Fișiere, Știri și Folosință</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="136"/> <source>Ctrl+M or &apos;Transaction/Commit&apos; to start installation/removal of selected packages</source> <translation>Ctrl+M sau „Tranzacție/Comite” pentru a începe instalarea/ștergerea pachetelor selectate</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="138"/> <source>Ctrl+B or &apos;Transaction/Rollback&apos; to clear the selection of to be removed/installed packages</source> <translation>Ctrl+B sau „Tranzacție/Anulează” pentru a goli selecția din pachetele de instalat/șters</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="140"/> <source>Ctrl+G or &apos;File/Get latest distro news&apos; to retrieve the latest RSS based distro news</source> <translation>Ctrl+G sai „Fișier/Obține ultimele știri ale distribuției” pentru a oține ultimele știri pe RSS</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="142"/> <source>Ctrl+Q or &apos;File/Exit&apos; to exit the application</source> <translation>Ctrl+Q sau „Fișier/Ieșire” pentru a ieși din aplicație</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="145"/> <source>Control+shift+key sequences:</source> <translation>Secvențe cu tastele Ctrl+Shift:</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="147"/> <source>Ctrl+Shift+C to clean local packages cache (pacman -Sc)</source> <translation>Ctrl+Shift+C pentru a curăța cache-ul local de pachete (pacman -Sc)</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="149"/> <source>Ctrl+Shift+G to display all package groups</source> <translation>Ctrl+Shift+G pentru a arăta toate grupele de pachete</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="151"/> <source>Ctrl+Shift+R to remove Pacman&apos;s transaction lock file</source> <translation>Ctrl+Shift+R pentru a elimina fișierul de blocare a tranzacției Pacman</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="153"/> <source>Ctrl+Shift+Y to display %1 group</source> <translation>Ctrl+Shift+Y pentru a arăta grupul %1</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="156"/> <source>F+key sequences:</source> <translation>Secvențe cu tastele de funcții:</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="158"/> <source>F1 to show this help page</source> <translation>F1 pentru a reveni aici</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="160"/> <source>F4 to open a Terminal whitin the selected directory at Files tab</source> <translation>F4 pentru a deschide un terminal în directorul selectat în fila Fișiere</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="162"/> <source>F6 to open a File Manager whitin the selected directory at Files tab</source> <translation>F6 pentru a deschide administratorul de fișiere în directorul selectat în fila Fișiere</translation> </message> <message> <location 
filename="Projects/octopi/src/mainwindow_help.cpp" line="164"/> <source>F10 to maximize/demaximize package list view</source> <translation>F10 pentru a maximiza/demaximiza afișajul listei de pachete</translation> </message> <message> <location filename="Projects/octopi/src/mainwindow_help.cpp" line="166"/> <source>F12 to maximize/demaximize Tab&apos;s view</source> <translation>F12 pentru a maximiza/demaximiza afișajul filelor</translation> </message> </context> <context> <name>QObject</name> <message> <location filename="Projects/octopi/src/strconstants.h" line="41"/> <source>Octopi usage help:</source> <translation>Ajutor de utilizare Octopi:</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="42"/> <source>show application version.</source> <translation>arată versiunea aplicației.</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="43"/> <source>use a different Qt4 style (ex: -style gtk).</source> <translation>folosește un stil Qt4 diferit (ex.: -style gtk).</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="44"/> <source>use a different remove command (ex: -removecmd R).</source> <translation>folosește o comandă disferită de ștergere (ex.: -removecmd R).</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="45"/> <source>force a system upgrade at startup.</source> <translation>forțează o actualizare de sistem la pornire.</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="51"/> <source>All</source> <translation>Toate</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="116"/> <source>Antergos news</source> <translation>Știri Antergos</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="120"/> <source>ArchBSD news</source> <translation>Știri ArchBSD</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="124"/> <source>Arch Linux news</source> <translation>Știri Arch Linux</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="128"/> <source>Chakra news</source> <translation>Știri Chakra</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="132"/> <source>KaOS news</source> <translation>Știri KaOS</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="136"/> <source>Manjaro Linux news</source> <translation>Știri Manjaro Linux</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="140"/> <source>Netrunner Rolling news</source> <translation>Știri Netrunner Rolling</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="144"/> <source>No news could be found! Press Ctrl+G to download the latest news.</source> <translation>Nu s-au găsit știri! 
Apăsați Ctrl+G pentru a descărca ultimele știri.</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="148"/> <source>This Linux distro seems to be incompatible with Octopi!</source> <translation>Această distribuție Linux pare incompatibilă cu Octopi!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="152"/> <source>Internet seems unavailable!</source> <translation>Internetul pare nedisponibil!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="156"/> <source>Display all groups</source> <translation>Arată toate grupele</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="167"/> <source>Usage</source> <translation>Folosință</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="171"/> <source>About</source> <translation>Despre</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="175"/> <source>Name</source> <translation>Nume</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="179"/> <source>Version</source> <translation>Versiune</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="183"/> <source>Outdated version</source> <translation>Versiune învechită</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="187"/> <source>Available version</source> <translation>Versiune disponibilă</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="191"/> <source>No description available.</source> <translation>Nu există descriere.</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="195"/> <source>URL</source> <translation>URL</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="199"/> <source>Repository</source> <translation>Depozit</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="203"/> <source>Licenses</source> <translation>Licențe</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="207"/> <source>Group</source> <translation>Grup</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="211"/> <source>Groups</source> <translation>Grupe</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="215"/> <source>Provides</source> <translation>Furnizează</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="219"/> <source>Depends On</source> <translation>Depinde de</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="223"/> <source>Required By</source> <translation>Cerut de</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="227"/> <source>Optional For</source> <translation>Opțional pentru</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="231"/> <source>Optional Deps</source> <translation>Dependențe opționale</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="235"/> <source>Conflicts With</source> <translation>În conflict cu</translation> </message> <message> <location 
filename="Projects/octopi/src/strconstants.h" line="239"/> <source>Replaces</source> <translation>Înlocuiește</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="243"/> <source>Popularity</source> <translation>Popularitate</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="247"/> <source>votes</source> <translation>voturi</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="251"/> <source>Download Size</source> <translation>Mărime descărcare</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="255"/> <source>Installed Size</source> <translation>Mărime instalată</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="259"/> <source>Packager</source> <translation>Împachetator</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="263"/> <source>Package</source> <translation>Pachet</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="267"/> <source>Packages</source> <translation>Pachete</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="271"/> <source>Architecture</source> <translation>Arhitectură</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="275"/> <source>Build Date</source> <translation>Data construirii</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="279"/> <source>Description</source> <translation>Descriere</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="283"/> <source>Attention</source> <translation>Atenție</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="287"/> <source>automatic</source> <translation>automat</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="291"/> <source>Password</source> <translation>Parolă</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="295"/> <source>Info</source> <translation>Info</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="299"/> <source>Files</source> <translation>Fișiere</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="303"/> <source>Transaction</source> <translation>Tranzacție</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="307"/> <source>Output</source> <translation>Rezultat</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="311"/> <source>News</source> <translation>Știri</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="315"/> <source>Contents of &quot;%1&quot;</source> <translation>Conținutul lui „%1”</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="319"/> <source>Find</source> <translation>Găsește</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="323"/> <source>Clear</source> <translation>Golește</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="327"/> <source> (outdated installed version is %1)</source> <translation>(versiunea învechită instalată 
este %1)</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="331"/> <source> (newer installed version is %1)</source> <translation>(versiunea mai nouă instalată este %1)</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="335"/> <source>Building package list...</source> <translation>Se generează lista de pachete...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="339"/> <source>Searching for %1 latest news...</source> <translation>Se caută ultimele %1 știri...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="343"/> <source>There is one outdated package in your system:</source> <translation>Există un pachet învechit în sistemul vostru:</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="347"/> <source>There are %1 outdated packages in your system:</source> <translation>Există %1 pachete învechite în sistemul vostru:</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="351"/> <source>(version %1 is available)</source> <translation>(versiunea %1 este disponibilă)</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="355"/> <source>%1 (%2) packages</source> <translation>%1 (%2) pachete</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="359"/> <source>%1 installed</source> <translation>%1 instalat</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="363"/> <source>%1 outdated</source> <translation>%1 învechit</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="367"/> <source>%1 available</source> <translation>%1 disponibil</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="371"/> <source>Cleaning package cache...</source> <translation>Se curăță cache-ul pachetelor...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="375"/> <source>Removing Pacman&apos;s transaction lock file...</source> <translation>Se elimină fișierul de blocare a tranzacției Pacman...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="379"/> <source>Syncing</source> <translation>Se sincronizează</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="383"/> <source>Press any key to continue...</source> <translation>Apăsați orice tastă pentru a continua...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="391"/> <source>Synchronizing databases...</source> <translation>Se sincronizează bazele de date...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="395"/> <source>is up to date</source> <translation>este actualizat</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="399"/> <source>Starting full system upgrade...</source> <translation>Începe actualizarea sistemului...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="403"/> <source>Installing selected packages...</source> <translation>Se instalează pachetele selectate...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="407"/> 
<source>Removing selected packages...</source> <translation>Se șterg pachetele selectate...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="411"/> <source>Removing/installing selected packages...</source> <translation>Se șterg/se instalează pachetele selectate...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="415"/> <source>Running command in terminal...</source> <translation>Se rulează comanda în teminal...</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="419"/> <source>Command finished OK!</source> <translation>Comanda s-a terminat bine!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="423"/> <source>Command finished with errors!</source> <translation>Comanda s-a terminat cu erori!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="427"/> <source>Retrieving %1</source> <translation>Se preia %1</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="431"/> <source>Total download size: %1 KB</source> <translation>Mărime totală a descărcării: %1 KB</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="435"/> <source>The following target needs to be retrieved</source> <translation>Următoarea țintă trebuie să fie preluată</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="439"/> <source>The following target will be removed</source> <translation>Următoarea țintă va fi ștearsă</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="443"/> <source>The following %1 targets need to be retrieved</source> <translation>Următoarele %1 ținte trebuie să fie preluate</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="447"/> <source>The following %1 targets will be removed</source> <translation>Următoarele %1 ținte vor fi șterse</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="451"/> <source>There are forbidden packages in the removal list!</source> <translation>Există pachete interzise în lista de ștergere!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="455"/> <source>There are no new updates available!</source> <translation>Nu există actualizări disponibile!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="459"/> <source>There is an update available!</source> <translation>Există o actualizare disponibilă!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="463"/> <source>There are %1 updates available!</source> <translation>Există %1 actualizări disponibile!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="467"/> <source>Confirm?</source> <translation>Confirmați?</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="471"/> <source>Warning!!!</source> <translation>Avertizare!!!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="475"/> <source>Confirmation</source> <translation>Confirmare</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="479"/> <source>There has been a transaction 
error!</source> <translation>S-a produs o eroare de tranzacție!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="483"/> <source>Do you want to execute this transaction in a Terminal?</source> <translation>Vreți să executați această tranzacție în terminal?</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="487"/> <source>Do you really want to clean the package cache?</source> <translation>Vreți să curățați cache-ul pachetelor?</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="491"/> <source>Do you really want to remove Pacman&apos;s transaction lock file?</source> <translation>Chiar vreți să eliminați fișierul de blocare a tranzacției Pacman?</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="495"/> <source>Do you really want to rollback the transaction?</source> <translation>Vreți să anulați tranzacția?</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="499"/> <source>Please, enter the administrator&apos;s password</source> <translation>Introduceți parola de administrator</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="505"/> <source>There are no means to get administrator&apos;s credentials.</source> <translation>Nu există mijloace pentru a obține certificarea de administrator.</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="509"/> <source>You&apos;ll need to install a su frontend like gksu or kdesu.</source> <translation>Va trebui să instalați o interfață su, precum gksu sau kdesu.</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="513"/> <source>You can not run Octopi with administrator&apos;s credentials.</source> <translation>Nu puteți rula Octopi cu certificare de administrator.</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="517"/> <source>There is a pending transaction</source> <translation>Există o tranzacție în așteptare</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="521"/> <source>Do you really want to quit?</source> <translation>Vreți să ieșiți?</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="525"/> <source>Executing command</source> <translation>Se execută comanda</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="529"/> <source>Run in terminal</source> <translation>Rulează în terminal</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="533"/> <source>Needs application restart to take effect</source> <translation>E nevoie de repornirea aplicației pentru a avea efect</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="537"/> <source>These changes need application restart to take effect!</source> <translation>Aceste schimbări au nevoie de repornirea aplicației pentru a avea efect!</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="541"/> <source>Select the packages you want to install</source> <translation>Selectați pachetele pe care le vreți instalate</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="545"/> <source>This file does not 
appear to be a simple text. Are you sure you want to open it?</source> <translation>Acest fișier nu pare a fi un simplu text. Sunteți sigur că vreți să-l deschideți?</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="550"/> <source>To be installed</source> <translation>De instalat</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="554"/> <source>To be removed</source> <translation>De șters</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="558"/> <source>remove</source> <translation>șterge</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="562"/> <source>install</source> <translation>instalează</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="566"/> <source>Remove item</source> <translation>Șterge elementul</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="570"/> <source>Remove items</source> <translation>Șterge elementele</translation> </message> <message> <location filename="Projects/octopi/src/strconstants.h" line="574"/> <source>Press Ctrl+A to select/deselect all</source> <translation>Apăsați Ctrl+A pentru a (de)selecta toate</translation> </message> </context> <context> <name>SearchBar</name> <message> <location filename="Projects/octopi/src/searchbar.cpp" line="63"/> <source>Previous</source> <translation>Înapoi</translation> </message> <message> <location filename="Projects/octopi/src/searchbar.cpp" line="67"/> <source>Next</source> <translation>Înainte</translation> </message> <message> <location filename="Projects/octopi/src/searchbar.cpp" line="79"/> <source>Close</source> <translation>Închide</translation> </message> </context> <context> <name>TransactionDialog</name> <message> <location filename="Projects/octopi/ui/transactiondialog.ui" line="20"/> <source>Form</source> <translation>Formular</translation> </message> <message> <location filename="Projects/octopi/ui/transactiondialog.ui" line="133"/> <source>Run in terminal</source> <translation>Rulează în terminal</translation> </message> </context> </TS><|fim▁end|>
<translation>Ctrl+G</translation>
<|file_name|>services.d.ts<|end_file_name|><|fim▁begin|>import { IInjectable } from "../common/common"; <|fim▁hole|>import * as angular from 'angular'; import IScope = angular.IScope; /** * Annotates a controller expression (may be a controller function(), a "controllername", * or "controllername as name") * * - Temporarily decorates $injector.instantiate. * - Invokes $controller() service * - Calls $injector.instantiate with controller constructor * - Annotate constructor * - Undecorate $injector * * returns an array of strings, which are the arguments of the controller expression */ export declare function annotateController(controllerExpression: (IInjectable | string)): string[]; declare module "../router" { interface UIRouter { /** @hidden TODO: move this to ng1.ts */ stateProvider: StateProvider; } } export declare function watchDigests($rootScope: IScope): void; export declare const getLocals: (ctx: ResolveContext) => TypedMap<any>;<|fim▁end|>
import { TypedMap } from "../common/common"; import { StateProvider } from "./stateProvider"; import { ResolveContext } from "../resolve/resolveContext";
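A minimal usage sketch for the `annotateController` declaration in the record above (not part of the original record). It only relies on what the doc comment states: the function accepts a controller function, a registered controller name, or a "controllername as name" expression and returns the controller's injectable argument names. The import path, the module name "demo", the service/controller names, and the logged output are assumptions for illustration; the call is placed in a run block on the assumption that an Angular injector must already be available.

import * as angular from "angular";
// Assumed path: the declaration above is shipped by @uirouter/angularjs (src/ng1/services).
import { annotateController } from "@uirouter/angularjs/lib/ng1/services";

// A controller whose dependency names are discovered by annotating its arguments.
function DemoController($scope: angular.IScope, demoService: unknown) {}

angular
  .module("demo", ["ui.router"])
  .service("demoService", function () {})
  .controller("DemoController", DemoController)
  .run(() => {
    // Both forms should yield the constructor's argument names, e.g. ["$scope", "demoService"].
    console.log(annotateController(DemoController));
    console.log(annotateController("DemoController as vm"));
  });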
<|file_name|>lumina-textedit_uz.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="uz_UZ"> <context> <name>ColorDialog</name> <message> <location filename="../ColorDialog.ui" line="14"/> <source>Customize Colors</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ColorDialog.ui" line="24"/> <source>Item Type</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ColorDialog.ui" line="29"/> <source>Color</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ColorDialog.ui" line="34"/> <source>Sample</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ColorDialog.ui" line="44"/> <location filename="../ColorDialog.cpp" line="55"/> <source>Select Color</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ColorDialog.ui" line="64"/> <source>Cancel</source> <translation type="unfinished"></translation> </message> <message> <location filename="../ColorDialog.ui" line="71"/> <source>Apply</source> <translation type="unfinished"></translation> </message> </context> <context> <name>MainUI</name> <message> <location filename="../MainUI.ui" line="14"/> <source>MainWindow</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="49"/> <source>Tab 1</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="57"/> <source>Find/Replace</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="87"/> <source>Find the previous match</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="90"/> <location filename="../MainUI.ui" line="113"/> <location filename="../MainUI.ui" line="164"/> <location filename="../MainUI.ui" line="177"/> <source>...</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="97"/> <source>Find:</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="110"/> <source>Find the next match</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="120"/> <source>Replace:</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="139"/> <source>Match case</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="161"/> <source>Replace next match</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="174"/> <source>Replace all matches (to end of document)</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="197"/> <source>File</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="210"/> <source>View</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="214"/> <source>Syntax Highlighting</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="227"/> <source>Edit</source> <translation 
type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="242"/> <source>toolBar</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="268"/> <source>Show Line Numbers</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="273"/> <source>None</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="278"/> <location filename="../MainUI.cpp" line="128"/> <source>New File</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="281"/> <source>Ctrl+N</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="289"/> <source>Open File</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="292"/> <source>Ctrl+O</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="300"/> <source>Save File</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="303"/> <source>Ctrl+S</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="311"/> <source>Save File As</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="316"/> <source>Close</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="319"/> <source>Ctrl+Q</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="327"/> <source>Close File</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="330"/> <source>Ctrl+W</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="338"/> <source>Customize Colors</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="349"/> <source>Wrap Lines</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="354"/><|fim▁hole|> <message> <location filename="../MainUI.ui" line="357"/> <source>Ctrl+F</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="365"/> <source>Replace</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.ui" line="368"/> <source>Ctrl+R</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.cpp" line="25"/> <source>Text Editor</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.cpp" line="135"/> <source>Open File(s)</source> <translation type="unfinished"></translation> </message> <message> <location filename="../MainUI.cpp" line="135"/> <source>Text Files (*)</source> <translation type="unfinished"></translation> </message> </context> <context> <name>PlainTextEditor</name> <message> <location filename="../PlainTextEditor.cpp" line="74"/> <source>Save File</source> <translation type="unfinished"></translation> </message> <message> <location filename="../PlainTextEditor.cpp" line="74"/> <source>Text File 
(*)</source> <translation type="unfinished"></translation> </message> </context> </TS><|fim▁end|>
<source>Find</source> <translation type="unfinished"></translation> </message>
<|file_name|>datatypes.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. //! Defines the data-types of Arrow arrays. //! //! For an overview of the terminology used within the arrow project and more general //! information regarding data-types and memory layouts see //! [here](https://arrow.apache.org/docs/memory_layout.html). use std::fmt; use std::mem::size_of; #[cfg(feature = "simd")] use std::ops::{Add, Div, Mul, Sub}; use std::slice::from_raw_parts; use std::str::FromStr; #[cfg(feature = "simd")] use packed_simd::*; use serde_derive::{Deserialize, Serialize}; use serde_json::{json, Number, Value, Value::Number as VNumber}; use crate::error::{ArrowError, Result}; use std::sync::Arc; /// The possible relative types that are supported. /// /// The variants of this enum include primitive fixed size types as well as parametric or /// nested types. /// Currently the Rust implementation supports the following nested types: /// - `List<T>` /// - `Struct<T, U, V, ...>` /// /// Nested types can themselves be nested within other arrays. /// For more information on these types please see /// [here](https://arrow.apache.org/docs/memory_layout.html). #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum DataType { Boolean, Int8, Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Float16, Float32, Float64, Timestamp(TimeUnit), Date32(DateUnit), Date64(DateUnit), Time32(TimeUnit), Time64(TimeUnit), Interval(IntervalUnit), Utf8, List(Box<DataType>), FixedSizeList((Box<DataType>, i32)), Struct(Vec<Field>), } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum DateUnit { Day, Millisecond, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum TimeUnit { Second, Millisecond, Microsecond, Nanosecond, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum IntervalUnit { YearMonth, DayTime, } /// Contains the meta-data for a single relative type. /// /// The `Schema` object is an ordered collection of `Field` objects. #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct Field { name: String, data_type: DataType, nullable: bool, } pub trait ArrowNativeType: fmt::Debug + Send + Sync + Copy + PartialOrd + FromStr + 'static { fn into_json_value(self) -> Option<Value>; } /// Trait indicating a primitive fixed-width type (bool, ints and floats). pub trait ArrowPrimitiveType: 'static { /// Corresponding Rust native type for the primitive type. type Native: ArrowNativeType; /// Returns the corresponding Arrow data type of this primitive type. fn get_data_type() -> DataType; /// Returns the bit width of this primitive type. 
fn get_bit_width() -> usize; /// Returns a default value of this primitive type. /// /// This is useful for aggregate array ops like `sum()`, `mean()`. fn default_value() -> Self::Native; } impl ArrowNativeType for bool { fn into_json_value(self) -> Option<Value> { Some(self.into()) } } impl ArrowNativeType for i8 { fn into_json_value(self) -> Option<Value> { Some(VNumber(Number::from(self))) } } impl ArrowNativeType for i16 { fn into_json_value(self) -> Option<Value> { Some(VNumber(Number::from(self))) } } impl ArrowNativeType for i32 { fn into_json_value(self) -> Option<Value> { Some(VNumber(Number::from(self))) } } impl ArrowNativeType for i64 { fn into_json_value(self) -> Option<Value> { Some(VNumber(Number::from(self))) } } impl ArrowNativeType for u8 { fn into_json_value(self) -> Option<Value> { Some(VNumber(Number::from(self))) } } impl ArrowNativeType for u16 { fn into_json_value(self) -> Option<Value> { Some(VNumber(Number::from(self))) } } impl ArrowNativeType for u32 { fn into_json_value(self) -> Option<Value> { Some(VNumber(Number::from(self))) } } impl ArrowNativeType for u64 { fn into_json_value(self) -> Option<Value> { Some(VNumber(Number::from(self))) } } impl ArrowNativeType for f32 { fn into_json_value(self) -> Option<Value> { Number::from_f64(self as f64).map(|num| VNumber(num)) } } impl ArrowNativeType for f64 { fn into_json_value(self) -> Option<Value> { Number::from_f64(self).map(|num| VNumber(num)) } } macro_rules! make_type { ($name:ident, $native_ty:ty, $data_ty:expr, $bit_width:expr, $default_val:expr) => { pub struct $name {} impl ArrowPrimitiveType for $name { type Native = $native_ty; fn get_data_type() -> DataType { $data_ty } fn get_bit_width() -> usize { $bit_width } fn default_value() -> Self::Native { $default_val } } }; } make_type!(BooleanType, bool, DataType::Boolean, 1, false); make_type!(Int8Type, i8, DataType::Int8, 8, 0i8); make_type!(Int16Type, i16, DataType::Int16, 16, 0i16); make_type!(Int32Type, i32, DataType::Int32, 32, 0i32); make_type!(Int64Type, i64, DataType::Int64, 64, 0i64); make_type!(UInt8Type, u8, DataType::UInt8, 8, 0u8); make_type!(UInt16Type, u16, DataType::UInt16, 16, 0u16); make_type!(UInt32Type, u32, DataType::UInt32, 32, 0u32); make_type!(UInt64Type, u64, DataType::UInt64, 64, 0u64); make_type!(Float32Type, f32, DataType::Float32, 32, 0.0f32); make_type!(Float64Type, f64, DataType::Float64, 64, 0.0f64); make_type!( TimestampSecondType, i64, DataType::Timestamp(TimeUnit::Second), 64, 0i64 ); make_type!( TimestampMillisecondType, i64, DataType::Timestamp(TimeUnit::Millisecond), 64, 0i64 ); make_type!( TimestampMicrosecondType, i64, DataType::Timestamp(TimeUnit::Microsecond), 64, 0i64 ); make_type!( TimestampNanosecondType, i64, DataType::Timestamp(TimeUnit::Nanosecond), 64, 0i64 ); make_type!(Date32Type, i32, DataType::Date32(DateUnit::Day), 32, 0i32); make_type!( Date64Type, i64, DataType::Date64(DateUnit::Millisecond), 64, 0i64 ); make_type!( Time32SecondType, i32, DataType::Time32(TimeUnit::Second), 32, 0i32 ); make_type!( Time32MillisecondType, i32, DataType::Time32(TimeUnit::Millisecond), 32, 0i32 ); make_type!( Time64MicrosecondType, i64, DataType::Time64(TimeUnit::Microsecond), 64, 0i64 ); make_type!( Time64NanosecondType, i64, DataType::Time64(TimeUnit::Nanosecond), 64, 0i64 ); make_type!( IntervalYearMonthType, i64, DataType::Interval(IntervalUnit::YearMonth), 64, 0i64 ); make_type!( IntervalDayTimeType, i64, DataType::Interval(IntervalUnit::DayTime), 64, 0i64 ); /// A subtype of primitive type that represents numeric 
values. /// /// SIMD operations are defined in this trait if available on the target system. #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))] pub trait ArrowNumericType: ArrowPrimitiveType where Self::Simd: Add<Output = Self::Simd> + Sub<Output = Self::Simd> + Mul<Output = Self::Simd> + Div<Output = Self::Simd>, { /// Defines the SIMD type that should be used for this numeric type type Simd; /// Defines the SIMD Mask type that should be used for this numeric type type SimdMask; /// The number of SIMD lanes available fn lanes() -> usize; /// Initializes a SIMD register to a constant value fn init(value: Self::Native) -> Self::Simd; /// Loads a slice into a SIMD register fn load(slice: &[Self::Native]) -> Self::Simd; /// Creates a new SIMD mask for this SIMD type filling it with `value` fn mask_init(value: bool) -> Self::SimdMask; /// Gets the value of a single lane in a SIMD mask fn mask_get(mask: &Self::SimdMask, idx: usize) -> bool; /// Sets the value of a single lane of a SIMD mask fn mask_set(mask: Self::SimdMask, idx: usize, value: bool) -> Self::SimdMask; /// Selects elements of `a` and `b` using `mask` fn mask_select(mask: Self::SimdMask, a: Self::Simd, b: Self::Simd) -> Self::Simd; /// Returns `true` if any of the lanes in the mask are `true` fn mask_any(mask: Self::SimdMask) -> bool; /// Performs a SIMD binary operation fn bin_op<F: Fn(Self::Simd, Self::Simd) -> Self::Simd>( left: Self::Simd, right: Self::Simd, op: F, ) -> Self::Simd; // SIMD version of equal fn eq(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; // SIMD version of not equal fn ne(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; // SIMD version of less than fn lt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; // SIMD version of less than or equal to fn le(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; // SIMD version of greater than fn gt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; // SIMD version of greater than or equal to fn ge(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; /// Writes a SIMD result back to a slice fn write(simd_result: Self::Simd, slice: &mut [Self::Native]); } #[cfg(any( not(any(target_arch = "x86", target_arch = "x86_64")), not(feature = "simd") ))] pub trait ArrowNumericType: ArrowPrimitiveType {} macro_rules! 
make_numeric_type { ($impl_ty:ty, $native_ty:ty, $simd_ty:ident, $simd_mask_ty:ident) => { #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))] impl ArrowNumericType for $impl_ty { type Simd = $simd_ty; type SimdMask = $simd_mask_ty; fn lanes() -> usize { Self::Simd::lanes() } fn init(value: Self::Native) -> Self::Simd { Self::Simd::splat(value) } fn load(slice: &[Self::Native]) -> Self::Simd { unsafe { Self::Simd::from_slice_unaligned_unchecked(slice) } } fn mask_init(value: bool) -> Self::SimdMask { Self::SimdMask::splat(value) } fn mask_get(mask: &Self::SimdMask, idx: usize) -> bool { unsafe { mask.extract_unchecked(idx) } } fn mask_set(mask: Self::SimdMask, idx: usize, value: bool) -> Self::SimdMask { unsafe { mask.replace_unchecked(idx, value) } } /// Selects elements of `a` and `b` using `mask` fn mask_select( mask: Self::SimdMask, a: Self::Simd, b: Self::Simd, ) -> Self::Simd { mask.select(a, b) } fn mask_any(mask: Self::SimdMask) -> bool { mask.any() } fn bin_op<F: Fn(Self::Simd, Self::Simd) -> Self::Simd>( left: Self::Simd, right: Self::Simd, op: F, ) -> Self::Simd { op(left, right) } fn eq(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.eq(right) } fn ne(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.ne(right) } fn lt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.lt(right) } fn le(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.le(right) } fn gt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.gt(right) } fn ge(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.ge(right) } fn write(simd_result: Self::Simd, slice: &mut [Self::Native]) { unsafe { simd_result.write_to_slice_unaligned_unchecked(slice) }; } } #[cfg(any( not(any(target_arch = "x86", target_arch = "x86_64")), not(feature = "simd") ))] impl ArrowNumericType for $impl_ty {} }; } make_numeric_type!(Int8Type, i8, i8x64, m8x64); make_numeric_type!(Int16Type, i16, i16x32, m16x32); make_numeric_type!(Int32Type, i32, i32x16, m32x16); make_numeric_type!(Int64Type, i64, i64x8, m64x8); make_numeric_type!(UInt8Type, u8, u8x64, m8x64); make_numeric_type!(UInt16Type, u16, u16x32, m16x32); make_numeric_type!(UInt32Type, u32, u32x16, m32x16); make_numeric_type!(UInt64Type, u64, u64x8, m64x8); make_numeric_type!(Float32Type, f32, f32x16, m32x16); make_numeric_type!(Float64Type, f64, f64x8, m64x8); make_numeric_type!(TimestampSecondType, i64, i64x8, m64x8); make_numeric_type!(TimestampMillisecondType, i64, i64x8, m64x8); make_numeric_type!(TimestampMicrosecondType, i64, i64x8, m64x8); make_numeric_type!(TimestampNanosecondType, i64, i64x8, m64x8); make_numeric_type!(Date32Type, i32, i32x16, m32x16); make_numeric_type!(Date64Type, i64, i64x8, m64x8); make_numeric_type!(Time32SecondType, i32, i32x16, m32x16); make_numeric_type!(Time32MillisecondType, i32, i32x16, m32x16); make_numeric_type!(Time64MicrosecondType, i64, i64x8, m64x8); make_numeric_type!(Time64NanosecondType, i64, i64x8, m64x8); make_numeric_type!(IntervalYearMonthType, i64, i64x8, m64x8); make_numeric_type!(IntervalDayTimeType, i64, i64x8, m64x8); /// A subtype of primitive type that represents temporal values. 
pub trait ArrowTemporalType: ArrowPrimitiveType {} impl ArrowTemporalType for TimestampSecondType {} impl ArrowTemporalType for TimestampMillisecondType {} impl ArrowTemporalType for TimestampMicrosecondType {} impl ArrowTemporalType for TimestampNanosecondType {} impl ArrowTemporalType for Date32Type {} impl ArrowTemporalType for Date64Type {} impl ArrowTemporalType for Time32SecondType {} impl ArrowTemporalType for Time32MillisecondType {} impl ArrowTemporalType for Time64MicrosecondType {} impl ArrowTemporalType for Time64NanosecondType {} impl ArrowTemporalType for IntervalYearMonthType {} impl ArrowTemporalType for IntervalDayTimeType {} /// Allows conversion from supported Arrow types to a byte slice. pub trait ToByteSlice { /// Converts this instance into a byte slice fn to_byte_slice(&self) -> &[u8]; } impl<T: ArrowNativeType> ToByteSlice for [T] { fn to_byte_slice(&self) -> &[u8] { let raw_ptr = self.as_ptr() as *const T as *const u8; unsafe { from_raw_parts(raw_ptr, self.len() * size_of::<T>()) } } } impl<T: ArrowNativeType> ToByteSlice for T { fn to_byte_slice(&self) -> &[u8] { let raw_ptr = self as *const T as *const u8; unsafe { from_raw_parts(raw_ptr, size_of::<T>()) } } } impl DataType { /// Parse a data type from a JSON representation fn from(json: &Value) -> Result<DataType> { match *json { Value::Object(ref map) => match map.get("name") { Some(s) if s == "bool" => Ok(DataType::Boolean), Some(s) if s == "utf8" => Ok(DataType::Utf8), Some(s) if s == "floatingpoint" => match map.get("precision") { Some(p) if p == "HALF" => Ok(DataType::Float16), Some(p) if p == "SINGLE" => Ok(DataType::Float32), Some(p) if p == "DOUBLE" => Ok(DataType::Float64), _ => Err(ArrowError::ParseError( "floatingpoint precision missing or invalid".to_string(), )), }, Some(s) if s == "timestamp" => match map.get("unit") { Some(p) if p == "SECOND" => Ok(DataType::Timestamp(TimeUnit::Second)), Some(p) if p == "MILLISECOND" => { Ok(DataType::Timestamp(TimeUnit::Millisecond)) } Some(p) if p == "MICROSECOND" => { Ok(DataType::Timestamp(TimeUnit::Microsecond)) } Some(p) if p == "NANOSECOND" => { Ok(DataType::Timestamp(TimeUnit::Nanosecond)) } _ => Err(ArrowError::ParseError( "timestamp unit missing or invalid".to_string(), )), }, Some(s) if s == "date" => match map.get("unit") { Some(p) if p == "DAY" => Ok(DataType::Date32(DateUnit::Day)), Some(p) if p == "MILLISECOND" => { Ok(DataType::Date64(DateUnit::Millisecond)) } _ => Err(ArrowError::ParseError( "date unit missing or invalid".to_string(), )), }, Some(s) if s == "time" => { let unit = match map.get("unit") { Some(p) if p == "SECOND" => Ok(TimeUnit::Second), Some(p) if p == "MILLISECOND" => Ok(TimeUnit::Millisecond), Some(p) if p == "MICROSECOND" => Ok(TimeUnit::Microsecond), Some(p) if p == "NANOSECOND" => Ok(TimeUnit::Nanosecond), _ => Err(ArrowError::ParseError( "time unit missing or invalid".to_string(), )), }; match map.get("bitWidth") { Some(p) if p == 32 => Ok(DataType::Time32(unit?)), Some(p) if p == 64 => Ok(DataType::Time64(unit?)), _ => Err(ArrowError::ParseError( "time bitWidth missing or invalid".to_string(), )), } } Some(s) if s == "interval" => match map.get("unit") { Some(p) if p == "DAY_TIME" => { Ok(DataType::Interval(IntervalUnit::DayTime)) } Some(p) if p == "YEAR_MONTH" => { Ok(DataType::Interval(IntervalUnit::YearMonth)) } _ => Err(ArrowError::ParseError( "interval unit missing or invalid".to_string(), )), }, Some(s) if s == "int" => match map.get("isSigned") { Some(&Value::Bool(true)) => match map.get("bitWidth") { 
Some(&Value::Number(ref n)) => match n.as_u64() {
                            Some(8) => Ok(DataType::Int8),
                            Some(16) => Ok(DataType::Int16),
                            Some(32) => Ok(DataType::Int32),
                            Some(64) => Ok(DataType::Int64),
                            _ => Err(ArrowError::ParseError(
                                "int bitWidth missing or invalid".to_string(),
                            )),
                        },
                        _ => Err(ArrowError::ParseError(
                            "int bitWidth missing or invalid".to_string(),
                        )),
                    },
                    Some(&Value::Bool(false)) => match map.get("bitWidth") {
                        Some(&Value::Number(ref n)) => match n.as_u64() {
                            Some(8) => Ok(DataType::UInt8),
                            Some(16) => Ok(DataType::UInt16),
                            Some(32) => Ok(DataType::UInt32),
                            Some(64) => Ok(DataType::UInt64),
                            _ => Err(ArrowError::ParseError(
                                "int bitWidth missing or invalid".to_string(),
                            )),
                        },
                        _ => Err(ArrowError::ParseError(
                            "int bitWidth missing or invalid".to_string(),
                        )),
                    },
                    _ => Err(ArrowError::ParseError(
                        "int signed missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "list" => {
                    // return a list with any type as its child isn't defined in the map
                    Ok(DataType::List(Box::new(DataType::Boolean)))
                }
                Some(s) if s == "fixedsizelist" => {
                    // return a list with any type as its child isn't defined in the map
                    if let Some(Value::Number(size)) = map.get("listSize") {
                        Ok(DataType::FixedSizeList((
                            Box::new(DataType::Boolean),
                            size.as_i64().unwrap() as i32,
                        )))
                    } else {
                        Err(ArrowError::ParseError(format!(
                            "Expecting a listSize for fixedsizelist",
                        )))
                    }
                }
                Some(s) if s == "struct" => {
                    // return an empty `struct` type as its children aren't defined in the map
                    Ok(DataType::Struct(vec![]))
                }
                Some(other) => Err(ArrowError::ParseError(format!(
                    "invalid or unsupported type name: {} in {:?}",
                    other, json
                ))),
                None => Err(ArrowError::ParseError("type name missing".to_string())),
            },
            _ => Err(ArrowError::ParseError(
                "invalid json value type".to_string(),
            )),
        }
    }

    /// Generate a JSON representation of the data type
    pub fn to_json(&self) -> Value {
        match self {
            DataType::Boolean => json!({"name": "bool"}),
            DataType::Int8 => json!({"name": "int", "bitWidth": 8, "isSigned": true}),
            DataType::Int16 => json!({"name": "int", "bitWidth": 16, "isSigned": true}),
            DataType::Int32 => json!({"name": "int", "bitWidth": 32, "isSigned": true}),
            DataType::Int64 => json!({"name": "int", "bitWidth": 64, "isSigned": true}),
            DataType::UInt8 => json!({"name": "int", "bitWidth": 8, "isSigned": false}),
            DataType::UInt16 => json!({"name": "int", "bitWidth": 16, "isSigned": false}),
            DataType::UInt32 => json!({"name": "int", "bitWidth": 32, "isSigned": false}),
            DataType::UInt64 => json!({"name": "int", "bitWidth": 64, "isSigned": false}),
            DataType::Float16 => json!({"name": "floatingpoint", "precision": "HALF"}),
            DataType::Float32 => json!({"name": "floatingpoint", "precision": "SINGLE"}),
            DataType::Float64 => json!({"name": "floatingpoint", "precision": "DOUBLE"}),
            DataType::Utf8 => json!({"name": "utf8"}),
            DataType::Struct(_) => json!({"name": "struct"}),
            DataType::List(_) => json!({ "name": "list"}),
            DataType::FixedSizeList((_, length)) => json!({"name":"fixedsizelist", "listSize": length}),
            DataType::Time32(unit) => {
                json!({"name": "time", "bitWidth": 32, "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }})
            }
            DataType::Time64(unit) => {
                json!({"name": "time", "bitWidth": 64, "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }})
            }
            DataType::Date32(unit) | DataType::Date64(unit) => {
                json!({"name": "date", "unit":
match unit { DateUnit::Day => "DAY", DateUnit::Millisecond => "MILLISECOND", }}) } DataType::Timestamp(unit) => json!({"name": "timestamp", "unit": match unit { TimeUnit::Second => "SECOND", TimeUnit::Millisecond => "MILLISECOND", TimeUnit::Microsecond => "MICROSECOND", TimeUnit::Nanosecond => "NANOSECOND", }}), DataType::Interval(unit) => json!({"name": "interval", "unit": match unit { IntervalUnit::YearMonth => "YEAR_MONTH", IntervalUnit::DayTime => "DAY_TIME", }}), } } } impl Field { /// Creates a new field pub fn new(name: &str, data_type: DataType, nullable: bool) -> Self { Field { name: name.to_string(), data_type, nullable, } } /// Returns an immutable reference to the `Field`'s name pub fn name(&self) -> &String { &self.name } /// Returns an immutable reference to the `Field`'s data-type pub fn data_type(&self) -> &DataType { &self.data_type } /// Indicates whether this `Field` supports null values pub fn is_nullable(&self) -> bool { self.nullable } /// Parse a `Field` definition from a JSON representation pub fn from(json: &Value) -> Result<Self> { match *json { Value::Object(ref map) => { let name = match map.get("name") { Some(&Value::String(ref name)) => name.to_string(), _ => { return Err(ArrowError::ParseError( "Field missing 'name' attribute".to_string(), )); } }; let nullable = match map.get("nullable") { Some(&Value::Bool(b)) => b, _ => { return Err(ArrowError::ParseError( "Field missing 'nullable' attribute".to_string(), )); } }; let data_type = match map.get("type") { Some(t) => DataType::from(t)?, _ => { return Err(ArrowError::ParseError( "Field missing 'type' attribute".to_string(), )); } }; // if data_type is a struct or list, get its children let data_type = match data_type { DataType::List(_) | DataType::FixedSizeList(_) => { match map.get("children") { Some(Value::Array(values)) => { if values.len() != 1 { return Err(ArrowError::ParseError( "Field 'children' must have one element for a list data type".to_string(), )); } match data_type { DataType::List(_) => DataType::List(Box::new( Self::from(&values[0])?.data_type, )), DataType::FixedSizeList((_, int)) => { DataType::FixedSizeList(( Box::new(Self::from(&values[0])?.data_type), int, )) } _ => unreachable!( "Data type should be a list or fixedsizelist" ), } } Some(_) => { return Err(ArrowError::ParseError( "Field 'children' must be an array".to_string(), )) } None => { return Err(ArrowError::ParseError( "Field missing 'children' attribute".to_string(), )); } } } DataType::Struct(mut fields) => match map.get("children") { Some(Value::Array(values)) => { let struct_fields: Result<Vec<Field>> = values.iter().map(|v| Field::from(v)).collect(); fields.append(&mut struct_fields?); DataType::Struct(fields) } Some(_) => { return Err(ArrowError::ParseError( "Field 'children' must be an array".to_string(), )) } None => { return Err(ArrowError::ParseError( "Field missing 'children' attribute".to_string(), )); } }, _ => data_type, }; Ok(Field { name, nullable, data_type, }) } _ => Err(ArrowError::ParseError( "Invalid json value type for field".to_string(), )), } } /// Generate a JSON representation of the `Field` pub fn to_json(&self) -> Value { let children: Vec<Value> = match self.data_type() { DataType::Struct(fields) => fields.iter().map(|f| f.to_json()).collect(), DataType::List(dtype) => { let item = Field::new("item", *dtype.clone(), self.nullable); vec![item.to_json()] } DataType::FixedSizeList((dtype, _)) => { let item = Field::new("item", *dtype.clone(), self.nullable); vec![item.to_json()] } _ => vec![], }; json!({ 
"name": self.name, "nullable": self.nullable, "type": self.data_type.to_json(), "children": children }) } /// Converts to a `String` representation of the `Field` pub fn to_string(&self) -> String { format!("{}: {:?}", self.name, self.data_type) } } impl fmt::Display for Field { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.to_string()) } } /// Describes the meta-data of an ordered sequence of relative types. /// /// Note that this information is only part of the meta-data and not part of the physical /// memory layout. #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] pub struct Schema { pub(crate) fields: Vec<Field>, } impl Schema { /// Creates an empty `Schema` pub fn empty() -> Self { Self { fields: vec![] } } /// Creates a new `Schema` from a sequence of `Field` values /// /// # Example /// /// ``` /// # extern crate arrow; /// # use arrow::datatypes::{Field, DataType, Schema}; /// let field_a = Field::new("a", DataType::Int64, false); /// let field_b = Field::new("b", DataType::Boolean, false); /// /// let schema = Schema::new(vec![field_a, field_b]); /// ``` pub fn new(fields: Vec<Field>) -> Self { Self { fields } } /// Returns an immutable reference of the vector of `Field` instances pub fn fields(&self) -> &Vec<Field> { &self.fields } /// Returns an immutable reference of a specific `Field` instance selected using an /// offset within the internal `fields` vector pub fn field(&self, i: usize) -> &Field { &self.fields[i] } /// Look up a column by name and return a immutable reference to the column along with /// it's index pub fn column_with_name(&self, name: &str) -> Option<(usize, &Field)> { self.fields .iter() .enumerate() .find(|&(_, c)| c.name == name) } /// Generate a JSON representation of the `Schema` pub fn to_json(&self) -> Value { json!({ "fields": self.fields.iter().map(|field| field.to_json()).collect::<Vec<Value>>(), }) } /// Parse a `Schema` definition from a JSON representation pub fn from(json: &Value) -> Result<Self> { match *json { Value::Object(ref schema) => { if let Some(Value::Array(fields)) = schema.get("fields") { let fields: Result<Vec<Field>> = fields.iter().map(|f| Field::from(f)).collect(); Ok(Schema::new(fields?)) } else { return Err(ArrowError::ParseError( "Schema fields should be an array".to_string(), )); } } _ => Err(ArrowError::ParseError( "Invalid json value type for schema".to_string(), )), } } } impl fmt::Display for Schema { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str( &self .fields .iter() .map(|c| c.to_string()) .collect::<Vec<String>>() .join(", "), ) } } pub type SchemaRef = Arc<Schema>; #[cfg(test)] mod tests { use super::*; use serde_json; use serde_json::Number; use serde_json::Value::{Bool, Number as VNumber}; use std::f32::NAN; #[test] fn create_struct_type() { let _person = DataType::Struct(vec![ Field::new("first_name", DataType::Utf8, false), Field::new("last_name", DataType::Utf8, false), Field::new( "address", DataType::Struct(vec![ Field::new("street", DataType::Utf8, false), Field::new("zip", DataType::UInt16, false), ]), false, ), ]); } #[test] fn serde_struct_type() { let person = DataType::Struct(vec![ Field::new("first_name", DataType::Utf8, false), Field::new("last_name", DataType::Utf8, false), Field::new( "address", DataType::Struct(vec![ Field::new("street", DataType::Utf8, false), Field::new("zip", DataType::UInt16, false), ]), false, ), ]); let serialized = serde_json::to_string(&person).unwrap(); // NOTE that this is testing the default (derived) 
serialization format, not the // JSON format specified in metadata.md assert_eq!( "{\"Struct\":[\ {\"name\":\"first_name\",\"data_type\":\"Utf8\",\"nullable\":false},\ {\"name\":\"last_name\",\"data_type\":\"Utf8\",\"nullable\":false},\ {\"name\":\"address\",\"data_type\":{\"Struct\":\ [{\"name\":\"street\",\"data_type\":\"Utf8\",\"nullable\":false},\ {\"name\":\"zip\",\"data_type\":\"UInt16\",\"nullable\":false}\ ]},\"nullable\":false}]}", serialized ); let deserialized = serde_json::from_str(&serialized).unwrap(); assert_eq!(person, deserialized); } #[test] fn struct_field_to_json() { let f = Field::new( "address", DataType::Struct(vec![ Field::new("street", DataType::Utf8, false), Field::new("zip", DataType::UInt16, false), ]), false, ); let value: Value = serde_json::from_str( r#"{ "name": "address", "nullable": false, "type": { "name": "struct" }, "children": [ { "name": "street", "nullable": false, "type": { "name": "utf8" }, "children": [] }, { "name": "zip", "nullable": false, "type": { "name": "int", "bitWidth": 16, "isSigned": false }, "children": [] } ] }"#, ) .unwrap(); assert_eq!(value, f.to_json()); } #[test] fn primitive_field_to_json() { let f = Field::new("first_name", DataType::Utf8, false); let value: Value = serde_json::from_str( r#"{ "name": "first_name", "nullable": false, "type": { "name": "utf8" }, "children": [] }"#, ) .unwrap(); assert_eq!(value, f.to_json()); } #[test] fn parse_struct_from_json() { let json = r#" { "name": "address", "type": { "name": "struct" }, "nullable": false, "children": [ { "name": "street", "type": { "name": "utf8" }, "nullable": false, "children": [] }, { "name": "zip", "type": { "name": "int", "isSigned": false, "bitWidth": 16 }, "nullable": false, "children": [] } ] } "#; let value: Value = serde_json::from_str(json).unwrap(); let dt = Field::from(&value).unwrap(); let expected = Field::new( "address", DataType::Struct(vec![ Field::new("street", DataType::Utf8, false), Field::new("zip", DataType::UInt16, false), ]), false, ); assert_eq!(expected, dt); } #[test] fn parse_utf8_from_json() { let json = "{\"name\":\"utf8\"}"; let value: Value = serde_json::from_str(json).unwrap(); let dt = DataType::from(&value).unwrap(); assert_eq!(DataType::Utf8, dt); } #[test] fn parse_int32_from_json() { let json = "{\"name\": \"int\", \"isSigned\": true, \"bitWidth\": 32}"; let value: Value = serde_json::from_str(json).unwrap(); let dt = DataType::from(&value).unwrap(); assert_eq!(DataType::Int32, dt); } #[test] fn schema_json() { let schema = Schema::new(vec![ Field::new("c1", DataType::Utf8, false), Field::new("c2", DataType::Boolean, false), Field::new("c3", DataType::Date32(DateUnit::Day), false), Field::new("c4", DataType::Date64(DateUnit::Millisecond), false), Field::new("c7", DataType::Time32(TimeUnit::Second), false), Field::new("c8", DataType::Time32(TimeUnit::Millisecond), false), Field::new("c9", DataType::Time32(TimeUnit::Microsecond), false), Field::new("c10", DataType::Time32(TimeUnit::Nanosecond), false), Field::new("c11", DataType::Time64(TimeUnit::Second), false), Field::new("c12", DataType::Time64(TimeUnit::Millisecond), false), Field::new("c13", DataType::Time64(TimeUnit::Microsecond), false), Field::new("c14", DataType::Time64(TimeUnit::Nanosecond), false), Field::new("c15", DataType::Timestamp(TimeUnit::Second), false), Field::new("c16", DataType::Timestamp(TimeUnit::Millisecond), false), Field::new("c17", DataType::Timestamp(TimeUnit::Microsecond), false), Field::new("c18", DataType::Timestamp(TimeUnit::Nanosecond), false), 
Field::new("c19", DataType::Interval(IntervalUnit::DayTime), false), Field::new("c20", DataType::Interval(IntervalUnit::YearMonth), false), Field::new("c21", DataType::List(Box::new(DataType::Boolean)), false), Field::new( "c22", DataType::FixedSizeList((Box::new(DataType::Boolean), 5)), false, ), Field::new( "c23", DataType::List(Box::new(DataType::List(Box::new(DataType::Struct( vec![], ))))), true, ), Field::new( "c24", DataType::Struct(vec![ Field::new("a", DataType::Utf8, false), Field::new("b", DataType::UInt16, false), ]), false, ), ]); let expected = schema.to_json(); let json = r#"{ "fields": [ { "name": "c1", "nullable": false, "type": { "name": "utf8" }, "children": [] }, { "name": "c2", "nullable": false, "type": { "name": "bool" }, "children": [] }, { "name": "c3", "nullable": false, "type": { "name": "date", "unit": "DAY" }, "children": [] }, { "name": "c4", "nullable": false, "type": { "name": "date", "unit": "MILLISECOND" }, "children": [] }, { "name": "c7", "nullable": false, "type": { "name": "time", "bitWidth": 32, "unit": "SECOND" }, "children": [] }, { "name": "c8", "nullable": false, "type": { "name": "time", "bitWidth": 32, "unit": "MILLISECOND" }, "children": [] }, { "name": "c9", "nullable": false, "type": { "name": "time", "bitWidth": 32, "unit": "MICROSECOND" }, "children": [] }, { "name": "c10", "nullable": false, "type": { "name": "time", "bitWidth": 32, "unit": "NANOSECOND" }, "children": [] }, { "name": "c11", "nullable": false, "type": { "name": "time", "bitWidth": 64, "unit": "SECOND" }, "children": [] }, { "name": "c12", "nullable": false, "type": { "name": "time", "bitWidth": 64, "unit": "MILLISECOND" }, "children": [] }, { "name": "c13", "nullable": false, "type": { "name": "time", "bitWidth": 64, "unit": "MICROSECOND" }, "children": [] }, { "name": "c14", "nullable": false, "type": { "name": "time", "bitWidth": 64, "unit": "NANOSECOND" }, "children": [] }, { "name": "c15", "nullable": false, "type": { "name": "timestamp", "unit": "SECOND" }, "children": [] }, { "name": "c16", "nullable": false, "type": { "name": "timestamp", "unit": "MILLISECOND" }, "children": [] }, { "name": "c17", "nullable": false, "type": { "name": "timestamp", "unit": "MICROSECOND" }, "children": [] }, { "name": "c18", "nullable": false, "type": { "name": "timestamp", "unit": "NANOSECOND" }, "children": [] }, { "name": "c19", "nullable": false, "type": { "name": "interval", "unit": "DAY_TIME" }, "children": [] }, { "name": "c20", "nullable": false, "type": { "name": "interval", "unit": "YEAR_MONTH" }, "children": [] }, { "name": "c21", "nullable": false, "type": { "name": "list" }, "children": [ { "name": "item", "nullable": false, "type": { "name": "bool" }, "children": [] } ] }, { "name": "c22", "nullable": false, "type": { "name": "fixedsizelist", "listSize": 5 }, "children": [ { "name": "item", "nullable": false, "type": { "name": "bool" }, "children": [] } ] }, { "name": "c23", "nullable": true, "type": { "name": "list" }, "children": [ { "name": "item", "nullable": true, "type": { "name": "list" }, "children": [ { "name": "item", "nullable": true, "type": { "name": "struct" }, "children": [] } ] } ] }, { "name": "c24", "nullable": false, "type": { "name": "struct" }, "children": [ { "name": "a", "nullable": false, "type": { "name": "utf8" }, "children": [] }, { "name": "b", "nullable": false, "type": { "name": "int", "bitWidth": 16, "isSigned": false }, "children": [] } ] } ] }"#; let value: Value = serde_json::from_str(&json).unwrap(); assert_eq!(expected, value); // 
convert back to a schema let value: Value = serde_json::from_str(&json).unwrap(); let schema2 = Schema::from(&value).unwrap(); assert_eq!(schema, schema2); } #[test] fn create_schema_string() { let _person = Schema::new(vec![ Field::new("first_name", DataType::Utf8, false), Field::new("last_name", DataType::Utf8, false), Field::new( "address", DataType::Struct(vec![ Field::new("street", DataType::Utf8, false), Field::new("zip", DataType::UInt16, false), ]), false, ), ]); assert_eq!(_person.to_string(), "first_name: Utf8, last_name: Utf8, address: Struct([Field { name: \"street\", data_type: Utf8, nullable: false }, Field { name: \"zip\", data_type: UInt16, nullable: false }])") } #[test] fn schema_field_accessors() { let _person = Schema::new(vec![ Field::new("first_name", DataType::Utf8, false), Field::new("last_name", DataType::Utf8, false), Field::new( "address", DataType::Struct(vec![ Field::new("street", DataType::Utf8, false), Field::new("zip", DataType::UInt16, false), ]), false, ), ]); // test schema accessors assert_eq!(_person.fields().len(), 3); // test field accessors assert_eq!(_person.fields()[0].name(), "first_name"); assert_eq!(_person.fields()[0].data_type(), &DataType::Utf8); assert_eq!(_person.fields()[0].is_nullable(), false); } #[test] fn schema_equality() { let schema1 = Schema::new(vec![ Field::new("c1", DataType::Utf8, false), Field::new("c2", DataType::Float64, true), ]); let schema2 = Schema::new(vec![ Field::new("c1", DataType::Utf8, false), Field::new("c2", DataType::Float64, true), ]); assert_eq!(schema1, schema2); let schema3 = Schema::new(vec![ Field::new("c1", DataType::Utf8, false), Field::new("c2", DataType::Float32, true), ]); let schema4 = Schema::new(vec![ Field::new("C1", DataType::Utf8, false), Field::new("C2", DataType::Float64, true), ]); assert!(schema1 != schema3); assert!(schema1 != schema4); assert!(schema2 != schema3); assert!(schema2 != schema4); assert!(schema3 != schema4); } #[test] fn test_arrow_native_type_to_json() { assert_eq!(Some(Bool(true)), true.into_json_value()); assert_eq!(Some(VNumber(Number::from(1))), 1i8.into_json_value()); assert_eq!(Some(VNumber(Number::from(1))), 1i16.into_json_value()); assert_eq!(Some(VNumber(Number::from(1))), 1i32.into_json_value()); assert_eq!(Some(VNumber(Number::from(1))), 1i64.into_json_value()); assert_eq!(Some(VNumber(Number::from(1))), 1u8.into_json_value()); assert_eq!(Some(VNumber(Number::from(1))), 1u16.into_json_value()); assert_eq!(Some(VNumber(Number::from(1))), 1u32.into_json_value()); assert_eq!(Some(VNumber(Number::from(1))), 1u64.into_json_value()); assert_eq!( Some(VNumber(Number::from_f64(0.01 as f64).unwrap())), 0.01.into_json_value()<|fim▁hole|> ); assert_eq!(None, NAN.into_json_value()); } }<|fim▁end|>
); assert_eq!( Some(VNumber(Number::from_f64(0.01f64).unwrap())), 0.01f64.into_json_value()
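A minimal sketch of the JSON round-trip exposed by the schema code above, assuming the same early `arrow` crate this snippet comes from (with `DateUnit` and the public `Schema::to_json` / `Schema::from` shown here) plus a `serde_json` dependency; the `arrow::datatypes` path, the crate versions, and the example field names are assumptions for illustration only.

use arrow::datatypes::{DataType, DateUnit, Field, Schema};
use serde_json::Value;

fn main() {
    // Build a schema with the constructors defined above.
    let schema = Schema::new(vec![
        Field::new("id", DataType::Int64, false),
        Field::new("name", DataType::Utf8, true),
        Field::new("born", DataType::Date32(DateUnit::Day), true),
    ]);

    // `Schema::to_json` emits the metadata-style JSON value assembled by
    // `Field::to_json` and `DataType::to_json`; `Schema::from` parses it back.
    let json: Value = schema.to_json();
    let parsed = Schema::from(&json).expect("schema JSON should round-trip");

    // `Schema` derives `PartialEq`, so a lossless round-trip can be asserted.
    assert_eq!(schema, parsed);
    println!("{}", json);
}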
<|file_name|>ZipOutputFileProvider.java<|end_file_name|><|fim▁begin|>// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.android.desugar.io; import static com.google.common.base.Preconditions.checkArgument; import com.google.common.io.ByteStreams; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.zip.CRC32; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream;<|fim▁hole|>/** Output provider is a zip file. */ class ZipOutputFileProvider implements OutputFileProvider { private final ZipOutputStream out; public ZipOutputFileProvider(Path root) throws IOException { out = new ZipOutputStream(new BufferedOutputStream(Files.newOutputStream(root))); } @Override public void copyFrom(String filename, InputFileProvider inputFileProvider) throws IOException { // TODO(bazel-team): Avoid de- and re-compressing resource files out.putNextEntry(inputFileProvider.getZipEntry(filename)); try (InputStream is = inputFileProvider.getInputStream(filename)) { ByteStreams.copy(is, out); } out.closeEntry(); } @Override public void write(String filename, byte[] content) throws IOException { checkArgument(filename.equals(DESUGAR_DEPS_FILENAME) || filename.endsWith(".class"), "Expect file to be copied: %s", filename); writeStoredEntry(out, filename, content); } @Override public void close() throws IOException { out.close(); } private static void writeStoredEntry(ZipOutputStream out, String filename, byte[] content) throws IOException { // Need to pre-compute checksum for STORED (uncompressed) entries) CRC32 checksum = new CRC32(); checksum.update(content); ZipEntry result = new ZipEntry(filename); result.setTime(0L); // Use stable timestamp Jan 1 1980 result.setCrc(checksum.getValue()); result.setSize(content.length); result.setCompressedSize(content.length); // Write uncompressed, since this is just an intermediary artifact that // we will convert to .dex result.setMethod(ZipEntry.STORED); out.putNextEntry(result); out.write(content); out.closeEntry(); } }<|fim▁end|>
<|file_name|>ti.py<|end_file_name|><|fim▁begin|>""" Type Inference """ from .typevar import TypeVar from .ast import Def, Var from copy import copy from itertools import product try: from typing import Dict, TYPE_CHECKING, Union, Tuple, Optional, Set # noqa from typing import Iterable, List, Any, TypeVar as MTypeVar # noqa from typing import cast from .xform import Rtl, XForm # noqa from .ast import Expr # noqa from .typevar import TypeSet # noqa if TYPE_CHECKING: T = MTypeVar('T') TypeMap = Dict[TypeVar, TypeVar] VarTyping = Dict[Var, TypeVar] except ImportError: TYPE_CHECKING = False pass class TypeConstraint(object): """ Base class for all runtime-emittable type constraints. """ def __init__(self, tv, tc): # type: (TypeVar, Union[TypeVar, TypeSet]) -> None """ Abstract "constructor" for linters """ assert False, "Abstract" def translate(self, m): # type: (Union[TypeEnv, TypeMap]) -> TypeConstraint """ Translate any TypeVars in the constraint according to the map or TypeEnv m """ def translate_one(a): # type: (Any) -> Any if (isinstance(a, TypeVar)): return m[a] if isinstance(m, TypeEnv) else subst(a, m) return a res = None # type: TypeConstraint res = self.__class__(*tuple(map(translate_one, self._args()))) return res def __eq__(self, other): # type: (object) -> bool if (not isinstance(other, self.__class__)): return False assert isinstance(other, TypeConstraint) # help MyPy figure out other return self._args() == other._args() def is_concrete(self): # type: () -> bool """ Return true iff all typevars in the constraint are singletons. """ return [] == list(filter(lambda x: x.singleton_type() is None, self.tvs())) def __hash__(self): # type: () -> int return hash(self._args()) def _args(self): # type: () -> Tuple[Any,...] """ Return a tuple with the exact arguments passed to __init__ to create this object. """ assert False, "Abstract" def tvs(self): # type: () -> Iterable[TypeVar] """ Return the typevars contained in this constraint. """ return list(filter(lambda x: isinstance(x, TypeVar), self._args())) def is_trivial(self): # type: () -> bool """ Return true if this constrain is statically decidable. """ assert False, "Abstract" def eval(self): # type: () -> bool """ Evaluate this constraint. Should only be called when the constraint has been translated to concrete types. """ assert False, "Abstract" def __repr__(self): # type: () -> str return (self.__class__.__name__ + '(' + ', '.join(map(str, self._args())) + ')') class TypesEqual(TypeConstraint): """ Constraint specifying that two derived type vars must have the same runtime type. """ def __init__(self, tv1, tv2): # type: (TypeVar, TypeVar) -> None (self.tv1, self.tv2) = sorted([tv1, tv2], key=repr) def _args(self): # type: () -> Tuple[Any,...] """ See TypeConstraint._args() """ return (self.tv1, self.tv2) def is_trivial(self): # type: () -> bool """ See TypeConstraint.is_trivial() """ return self.tv1 == self.tv2 or self.is_concrete() def eval(self): # type: () -> bool """ See TypeConstraint.eval() """ assert self.is_concrete() return self.tv1.singleton_type() == self.tv2.singleton_type() class InTypeset(TypeConstraint): """ Constraint specifying that a type var must belong to some typeset. """ def __init__(self, tv, ts): # type: (TypeVar, TypeSet) -> None assert not tv.is_derived and tv.name.startswith("typeof_") self.tv = tv self.ts = ts def _args(self): # type: () -> Tuple[Any,...] 
""" See TypeConstraint._args() """ return (self.tv, self.ts) def is_trivial(self): # type: () -> bool """ See TypeConstraint.is_trivial() """ tv_ts = self.tv.get_typeset().copy() # Trivially True if (tv_ts.issubset(self.ts)): return True # Trivially false tv_ts &= self.ts if (tv_ts.size() == 0): return True return self.is_concrete() def eval(self): # type: () -> bool """ See TypeConstraint.eval() """ assert self.is_concrete() return self.tv.get_typeset().issubset(self.ts) class WiderOrEq(TypeConstraint): """ Constraint specifying that a type var tv1 must be wider than or equal to type var tv2 at runtime. This requires that: 1) They have the same number of lanes 2) In a lane tv1 has at least as many bits as tv2. """ def __init__(self, tv1, tv2): # type: (TypeVar, TypeVar) -> None self.tv1 = tv1 self.tv2 = tv2 def _args(self): # type: () -> Tuple[Any,...] """ See TypeConstraint._args() """ return (self.tv1, self.tv2) def is_trivial(self): # type: () -> bool """ See TypeConstraint.is_trivial() """ # Trivially true if (self.tv1 == self.tv2): return True ts1 = self.tv1.get_typeset() ts2 = self.tv2.get_typeset() def set_wider_or_equal(s1, s2): # type: (Set[int], Set[int]) -> bool return len(s1) > 0 and len(s2) > 0 and min(s1) >= max(s2) # Trivially True if set_wider_or_equal(ts1.ints, ts2.ints) and\ set_wider_or_equal(ts1.floats, ts2.floats) and\ set_wider_or_equal(ts1.bools, ts2.bools): return True def set_narrower(s1, s2): # type: (Set[int], Set[int]) -> bool return len(s1) > 0 and len(s2) > 0 and min(s1) < max(s2) # Trivially False if set_narrower(ts1.ints, ts2.ints) and\ set_narrower(ts1.floats, ts2.floats) and\ set_narrower(ts1.bools, ts2.bools): return True # Trivially False if len(ts1.lanes.intersection(ts2.lanes)) == 0: return True return self.is_concrete() def eval(self): # type: () -> bool """ See TypeConstraint.eval() """ assert self.is_concrete() typ1 = self.tv1.singleton_type() typ2 = self.tv2.singleton_type() return typ1.wider_or_equal(typ2) class SameWidth(TypeConstraint): """ Constraint specifying that two types have the same width. E.g. i32x2 has the same width as i64x1, i16x4, f32x2, f64, b1x64 etc. """ def __init__(self, tv1, tv2): # type: (TypeVar, TypeVar) -> None self.tv1 = tv1 self.tv2 = tv2 def _args(self): # type: () -> Tuple[Any,...] """ See TypeConstraint._args() """ return (self.tv1, self.tv2) def is_trivial(self): # type: () -> bool """ See TypeConstraint.is_trivial() """ # Trivially true if (self.tv1 == self.tv2): return True ts1 = self.tv1.get_typeset() ts2 = self.tv2.get_typeset() # Trivially False if len(ts1.widths().intersection(ts2.widths())) == 0: return True return self.is_concrete() def eval(self): # type: () -> bool """ See TypeConstraint.eval() """ assert self.is_concrete() typ1 = self.tv1.singleton_type() typ2 = self.tv2.singleton_type() return (typ1.width() == typ2.width()) class TypeEnv(object): """ Class encapsulating the necessary book keeping for type inference. :attribute type_map: dict holding the equivalence relations between tvs :attribute constraints: a list of accumulated constraints - tuples (tv1, tv2)) where tv1 and tv2 are equal :attribute ranks: dictionary recording the (optional) ranks for tvs. 'rank' is a partial ordering on TVs based on their origin. See comments in rank() and register(). 
:attribute vars: a set containing all known Vars :attribute idx: counter used to get fresh ids """ RANK_SINGLETON = 5 RANK_INPUT = 4 RANK_INTERMEDIATE = 3 RANK_OUTPUT = 2 RANK_TEMP = 1 RANK_INTERNAL = 0 def __init__(self, arg=None): # type: (Optional[Tuple[TypeMap, List[TypeConstraint]]]) -> None self.ranks = {} # type: Dict[TypeVar, int] self.vars = set() # type: Set[Var] if arg is None: self.type_map = {} # type: TypeMap self.constraints = [] # type: List[TypeConstraint] else: self.type_map, self.constraints = arg self.idx = 0 def __getitem__(self, arg): # type: (Union[TypeVar, Var]) -> TypeVar """ Lookup the canonical representative for a Var/TypeVar. """ if (isinstance(arg, Var)): assert arg in self.vars tv = arg.get_typevar() else: assert (isinstance(arg, TypeVar)) tv = arg while tv in self.type_map: tv = self.type_map[tv]<|fim▁hole|> def equivalent(self, tv1, tv2): # type: (TypeVar, TypeVar) -> None """ Record a that the free tv1 is part of the same equivalence class as tv2. The canonical representative of the merged class is tv2's canonical representative. """ assert not tv1.is_derived assert self[tv1] == tv1 # Make sure we don't create cycles if tv2.is_derived: assert self[tv2.base] != tv1 self.type_map[tv1] = tv2 def add_constraint(self, constr): # type: (TypeConstraint) -> None """ Add a new constraint """ if (constr in self.constraints): return # InTypeset constraints can be expressed by constraining the typeset of # a variable. No need to add them to self.constraints if (isinstance(constr, InTypeset)): self[constr.tv].constrain_types_by_ts(constr.ts) return self.constraints.append(constr) def get_uid(self): # type: () -> str r = str(self.idx) self.idx += 1 return r def __repr__(self): # type: () -> str return self.dot() def rank(self, tv): # type: (TypeVar) -> int """ Get the rank of tv in the partial order. TVs directly associated with a Var get their rank from the Var (see register()). Internally generated non-derived TVs implicitly get the lowest rank (0). Derived variables get their rank from their free typevar. Singletons have the highest rank. TVs associated with vars in a source pattern have a higher rank than TVs associated with temporary vars. """ default_rank = TypeEnv.RANK_INTERNAL if tv.singleton_type() is None \ else TypeEnv.RANK_SINGLETON if tv.is_derived: tv = tv.free_typevar() return self.ranks.get(tv, default_rank) def register(self, v): # type: (Var) -> None """ Register a new Var v. This computes a rank for the associated TypeVar for v, which is used to impose a partial order on type variables. """ self.vars.add(v) if v.is_input(): r = TypeEnv.RANK_INPUT elif v.is_intermediate(): r = TypeEnv.RANK_INTERMEDIATE elif v.is_output(): r = TypeEnv.RANK_OUTPUT else: assert(v.is_temp()) r = TypeEnv.RANK_TEMP self.ranks[v.get_typevar()] = r def free_typevars(self): # type: () -> List[TypeVar] """ Get the free typevars in the current type env. """ tvs = set([self[tv].free_typevar() for tv in self.type_map.keys()]) tvs = tvs.union(set([self[v].free_typevar() for v in self.vars])) # Filter out None here due to singleton type vars return sorted(filter(lambda x: x is not None, tvs), key=lambda x: x.name) def normalize(self): # type: () -> None """ Normalize by: - collapsing any roots that don't correspond to a concrete TV AND have a single TV derived from them or equivalent to them E.g. if we have a root of the tree that looks like: typeof_a typeof_b \\ / typeof_x | half_width(1) | 1 we want to collapse the linear path between 1 and typeof_x. 
The resulting graph is: typeof_a typeof_b \\ / typeof_x """ source_tvs = set([v.get_typevar() for v in self.vars]) children = {} # type: Dict[TypeVar, Set[TypeVar]] for v in self.type_map.values(): if not v.is_derived: continue t = v.free_typevar() s = children.get(t, set()) s.add(v) children[t] = s for (a, b) in self.type_map.items(): s = children.get(b, set()) s.add(a) children[b] = s for r in self.free_typevars(): while (r not in source_tvs and r in children and len(children[r]) == 1): child = list(children[r])[0] if child in self.type_map: assert self.type_map[child] == r del self.type_map[child] r = child def extract(self): # type: () -> TypeEnv """ Extract a clean type environment from self, that only mentions TVs associated with real variables """ vars_tvs = set([v.get_typevar() for v in self.vars]) new_type_map = {tv: self[tv] for tv in vars_tvs if tv != self[tv]} new_constraints = [] # type: List[TypeConstraint] for constr in self.constraints: constr = constr.translate(self) if constr.is_trivial() or constr in new_constraints: continue # Sanity: translated constraints should refer to only real vars for arg in constr._args(): if (not isinstance(arg, TypeVar)): continue arg_free_tv = arg.free_typevar() assert arg_free_tv is None or arg_free_tv in vars_tvs new_constraints.append(constr) # Sanity: translated typemap should refer to only real vars for (k, v) in new_type_map.items(): assert k in vars_tvs assert v.free_typevar() is None or v.free_typevar() in vars_tvs t = TypeEnv() t.type_map = new_type_map t.constraints = new_constraints # ranks and vars contain only TVs associated with real vars t.ranks = copy(self.ranks) t.vars = copy(self.vars) return t def concrete_typings(self): # type: () -> Iterable[VarTyping] """ Return an iterable over all possible concrete typings permitted by this TypeEnv. """ free_tvs = self.free_typevars() free_tv_iters = [tv.get_typeset().concrete_types() for tv in free_tvs] for concrete_types in product(*free_tv_iters): # Build type substitutions for all free vars m = {tv: TypeVar.singleton(typ) for (tv, typ) in zip(free_tvs, concrete_types)} concrete_var_map = {v: subst(self[v.get_typevar()], m) for v in self.vars} # Check if constraints are satisfied for this typing failed = None for constr in self.constraints: concrete_constr = constr.translate(m) if not concrete_constr.eval(): failed = concrete_constr break if (failed is not None): continue yield concrete_var_map def permits(self, concrete_typing): # type: (VarTyping) -> bool """ Return true iff this TypeEnv permits the (possibly partial) concrete variable type mapping concrete_typing. """ # Each variable has a concrete type, that is a subset of its inferred # typeset. for (v, typ) in concrete_typing.items(): assert typ.singleton_type() is not None if not typ.get_typeset().issubset(self[v].get_typeset()): return False m = {self[v]: typ for (v, typ) in concrete_typing.items()} # Constraints involving vars in concrete_typing are satisfied for constr in self.constraints: try: # If the constraint includes only vars in concrete_typing, we # can translate it using m. Otherwise we encounter a KeyError # and ignore it constr = constr.translate(m) if not constr.eval(): return False except KeyError: pass return True def dot(self): # type: () -> str """ Return a representation of self as a graph in dot format. Nodes correspond to TypeVariables. Dotted edges correspond to equivalences between TVS Solid edges correspond to derivation relations between TVs. Dashed edges correspond to equivalence constraints. 
""" def label(s): # type: (TypeVar) -> str return "\"" + str(s) + "\"" # Add all registered TVs (as some of them may be singleton nodes not # appearing in the graph nodes = set() # type: Set[TypeVar] edges = set() # type: Set[Tuple[TypeVar, TypeVar, str, str, Optional[str]]] # noqa def add_nodes(*args): # type: (*TypeVar) -> None for tv in args: nodes.add(tv) while (tv.is_derived): nodes.add(tv.base) edges.add((tv, tv.base, "solid", "forward", tv.derived_func)) tv = tv.base for v in self.vars: add_nodes(v.get_typevar()) for (tv1, tv2) in self.type_map.items(): # Add all intermediate TVs appearing in edges add_nodes(tv1, tv2) edges.add((tv1, tv2, "dotted", "forward", None)) for constr in self.constraints: if isinstance(constr, TypesEqual): add_nodes(constr.tv1, constr.tv2) edges.add((constr.tv1, constr.tv2, "dashed", "none", "equal")) elif isinstance(constr, WiderOrEq): add_nodes(constr.tv1, constr.tv2) edges.add((constr.tv1, constr.tv2, "dashed", "forward", ">=")) elif isinstance(constr, SameWidth): add_nodes(constr.tv1, constr.tv2) edges.add((constr.tv1, constr.tv2, "dashed", "none", "same_width")) else: assert False, "Can't display constraint {}".format(constr) root_nodes = set([x for x in nodes if x not in self.type_map and not x.is_derived]) r = "digraph {\n" for n in nodes: r += label(n) if n in root_nodes: r += "[xlabel=\"{}\"]".format(self[n].get_typeset()) r += ";\n" for (n1, n2, style, direction, elabel) in edges: e = label(n1) + "->" + label(n2) e += "[style={},dir={}".format(style, direction) if elabel is not None: e += ",label=\"{}\"".format(elabel) e += "];\n" r += e r += "}" return r if TYPE_CHECKING: TypingError = str TypingOrError = Union[TypeEnv, TypingError] def get_error(typing_or_err): # type: (TypingOrError) -> Optional[TypingError] """ Helper function to appease mypy when checking the result of typing. """ if isinstance(typing_or_err, str): if (TYPE_CHECKING): return cast(TypingError, typing_or_err) else: return typing_or_err else: return None def get_type_env(typing_or_err): # type: (TypingOrError) -> TypeEnv """ Helper function to appease mypy when checking the result of typing. """ assert isinstance(typing_or_err, TypeEnv), \ "Unexpected error: {}".format(typing_or_err) if (TYPE_CHECKING): return cast(TypeEnv, typing_or_err) else: return typing_or_err def subst(tv, tv_map): # type: (TypeVar, TypeMap) -> TypeVar """ Perform substition on the input tv using the TypeMap tv_map. """ if tv in tv_map: return tv_map[tv] if tv.is_derived: return TypeVar.derived(subst(tv.base, tv_map), tv.derived_func) return tv def normalize_tv(tv): # type: (TypeVar) -> TypeVar """ Normalize a (potentially derived) TV using the following rules: - vector and width derived functions commute {HALF,DOUBLE}VECTOR({HALF,DOUBLE}WIDTH(base)) -> {HALF,DOUBLE}WIDTH({HALF,DOUBLE}VECTOR(base)) - half/double pairs collapse {HALF,DOUBLE}WIDTH({DOUBLE,HALF}WIDTH(base)) -> base {HALF,DOUBLE}VECTOR({DOUBLE,HALF}VECTOR(base)) -> base """ vector_derives = [TypeVar.HALFVECTOR, TypeVar.DOUBLEVECTOR] width_derives = [TypeVar.HALFWIDTH, TypeVar.DOUBLEWIDTH] if not tv.is_derived: return tv df = tv.derived_func if (tv.base.is_derived): base_df = tv.base.derived_func # Reordering: {HALFWIDTH, DOUBLEWIDTH} commute with {HALFVECTOR, # DOUBLEVECTOR}. Arbitrarily pick WIDTH < VECTOR if df in vector_derives and base_df in width_derives: return normalize_tv( TypeVar.derived( TypeVar.derived(tv.base.base, df), base_df)) # Cancelling: HALFWIDTH, DOUBLEWIDTH and HALFVECTOR, DOUBLEVECTOR # cancel each other. 
Note: This doesn't hide any over/underflows, # since we 1) assert the safety of each TV in the chain upon its # creation, and 2) the base typeset is only allowed to shrink. if (df, base_df) in \ [(TypeVar.HALFVECTOR, TypeVar.DOUBLEVECTOR), (TypeVar.DOUBLEVECTOR, TypeVar.HALFVECTOR), (TypeVar.HALFWIDTH, TypeVar.DOUBLEWIDTH), (TypeVar.DOUBLEWIDTH, TypeVar.HALFWIDTH)]: return normalize_tv(tv.base.base) return TypeVar.derived(normalize_tv(tv.base), df) def constrain_fixpoint(tv1, tv2): # type: (TypeVar, TypeVar) -> None """ Given typevars tv1 and tv2 (which could be derived from one another) constrain their typesets to be the same. When one is derived from the other, repeat the constrain process until fixpoint. """ # Constrain tv2's typeset as long as tv1's typeset is changing. while True: old_tv1_ts = tv1.get_typeset().copy() tv2.constrain_types(tv1) if tv1.get_typeset() == old_tv1_ts: break old_tv2_ts = tv2.get_typeset().copy() tv1.constrain_types(tv2) assert old_tv2_ts == tv2.get_typeset() def unify(tv1, tv2, typ): # type: (TypeVar, TypeVar, TypeEnv) -> TypingOrError """ Unify tv1 and tv2 in the current type environment typ, and return an updated type environment or error. """ tv1 = normalize_tv(typ[tv1]) tv2 = normalize_tv(typ[tv2]) # Already unified if tv1 == tv2: return typ if typ.rank(tv2) < typ.rank(tv1): return unify(tv2, tv1, typ) constrain_fixpoint(tv1, tv2) if (tv1.get_typeset().size() == 0 or tv2.get_typeset().size() == 0): return "Error: empty type created when unifying {} and {}"\ .format(tv1, tv2) # Free -> Derived(Free) if not tv1.is_derived: typ.equivalent(tv1, tv2) return typ if (tv1.is_derived and TypeVar.is_bijection(tv1.derived_func)): inv_f = TypeVar.inverse_func(tv1.derived_func) return unify(tv1.base, normalize_tv(TypeVar.derived(tv2, inv_f)), typ) typ.add_constraint(TypesEqual(tv1, tv2)) return typ def move_first(l, i): # type: (List[T], int) -> List[T] return [l[i]] + l[:i] + l[i+1:] def ti_def(definition, typ): # type: (Def, TypeEnv) -> TypingOrError """ Perform type inference on one Def in the current type environment typ and return an updated type environment or error. At a high level this works by creating fresh copies of each formal type var in the Def's instruction's signature, and unifying the formal tv with the corresponding actual tv. """ expr = definition.expr inst = expr.inst # Create a dict m mapping each free typevar in the signature of definition # to a fresh copy of itself. free_formal_tvs = inst.all_typevars() m = {tv: tv.get_fresh_copy(str(typ.get_uid())) for tv in free_formal_tvs} # Update m with any explicitly bound type vars for (idx, bound_typ) in enumerate(expr.typevars): m[free_formal_tvs[idx]] = TypeVar.singleton(bound_typ) # Get fresh copies for each typevar in the signature (both free and # derived) fresh_formal_tvs = \ [subst(inst.outs[i].typevar, m) for i in inst.value_results] +\ [subst(inst.ins[i].typevar, m) for i in inst.value_opnums] # Get the list of actual Vars actual_vars = [] # type: List[Expr] actual_vars += [definition.defs[i] for i in inst.value_results] actual_vars += [expr.args[i] for i in inst.value_opnums] # Get the list of the actual TypeVars actual_tvs = [] for v in actual_vars: assert(isinstance(v, Var)) # Register with TypeEnv that this typevar corresponds ot variable v, # and thus has a given rank typ.register(v) actual_tvs.append(v.get_typevar()) # Make sure we unify the control typevar first. 
if inst.is_polymorphic: idx = fresh_formal_tvs.index(m[inst.ctrl_typevar]) fresh_formal_tvs = move_first(fresh_formal_tvs, idx) actual_tvs = move_first(actual_tvs, idx) # Unify each actual typevar with the corresponding fresh formal tv for (actual_tv, formal_tv) in zip(actual_tvs, fresh_formal_tvs): typ_or_err = unify(actual_tv, formal_tv, typ) err = get_error(typ_or_err) if (err): return "fail ti on {} <: {}: ".format(actual_tv, formal_tv) + err typ = get_type_env(typ_or_err) # Add any instruction specific constraints for constr in inst.constraints: typ.add_constraint(constr.translate(m)) return typ def ti_rtl(rtl, typ): # type: (Rtl, TypeEnv) -> TypingOrError """ Perform type inference on an Rtl in a starting type env typ. Return an updated type environment or error. """ for (i, d) in enumerate(rtl.rtl): assert (isinstance(d, Def)) typ_or_err = ti_def(d, typ) err = get_error(typ_or_err) # type: Optional[TypingError] if (err): return "On line {}: ".format(i) + err typ = get_type_env(typ_or_err) return typ def ti_xform(xform, typ): # type: (XForm, TypeEnv) -> TypingOrError """ Perform type inference on an Rtl in a starting type env typ. Return an updated type environment or error. """ typ_or_err = ti_rtl(xform.src, typ) err = get_error(typ_or_err) # type: Optional[TypingError] if (err): return "In src pattern: " + err typ = get_type_env(typ_or_err) typ_or_err = ti_rtl(xform.dst, typ) err = get_error(typ_or_err) if (err): return "In dst pattern: " + err typ = get_type_env(typ_or_err) return get_type_env(typ_or_err)<|fim▁end|>
if tv.is_derived: tv = TypeVar.derived(self[tv.base], tv.derived_func) return tv
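The `unify`/`constrain_fixpoint` machinery above ultimately narrows two variables to the intersection of their candidate type sets and reports an error when that intersection is empty. The toy sketch below restates just that core step with plain hash sets; it is a standalone illustration in Rust and deliberately does not use this module's `TypeVar`/`TypeSet` API.

use std::collections::HashSet;

// A toy "type variable": a name plus a set of candidate concrete types.
#[derive(Debug, Clone)]
struct ToyTypeVar {
    name: String,
    candidates: HashSet<&'static str>,
}

// Unification here is just set intersection; an empty result is a type error,
// mirroring the "empty type created when unifying ..." message above.
fn unify(a: &ToyTypeVar, b: &ToyTypeVar) -> Result<HashSet<&'static str>, String> {
    let merged: HashSet<&'static str> =
        a.candidates.intersection(&b.candidates).copied().collect();
    if merged.is_empty() {
        Err(format!("empty type created when unifying {} and {}", a.name, b.name))
    } else {
        Ok(merged)
    }
}

fn main() {
    let x = ToyTypeVar {
        name: "typeof_x".to_string(),
        candidates: ["i32", "i64", "f32"].iter().copied().collect(),
    };
    let y = ToyTypeVar {
        name: "typeof_y".to_string(),
        candidates: ["i64", "f64"].iter().copied().collect(),
    };
    match unify(&x, &y) {
        Ok(tys) => println!("unified candidates: {:?}", tys),
        Err(e) => println!("type error: {}", e),
    }
}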
<|file_name|>ZoneControlState.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2010-2018 by the respective copyright holders.<|fim▁hole|> * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.binding.yamahareceiver.internal.state; import static org.openhab.binding.yamahareceiver.YamahaReceiverBindingConstants.VALUE_EMPTY; /** * The state of a specific zone of a Yamaha receiver. * * @author David Graeff <[email protected]> * */ public class ZoneControlState { public boolean power = false; // User visible name of the input channel for the current zone public String inputName = VALUE_EMPTY; // The ID of the input channel that is used as xml tags (for example NET_RADIO, HDMI_1). // This may differ from what the AVR returns in Input/Input_Sel ("NET RADIO", "HDMI1") public String inputID = VALUE_EMPTY; public String surroundProgram = VALUE_EMPTY; public float volumeDB = 0.0f; // volume in dB public boolean mute = false; public int dialogueLevel = 0; }<|fim▁end|>
* * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at
<|file_name|>sequential_handler.go<|end_file_name|><|fim▁begin|>package dbus import ( "sync" ) // NewSequentialSignalHandler returns an instance of a new // signal handler that guarantees sequential processing of signals. It is a // guarantee of this signal handler that signals will be written to // channels in the order they are received on the DBus connection. func NewSequentialSignalHandler() SignalHandler { return &sequentialSignalHandler{} } type sequentialSignalHandler struct { mu sync.RWMutex closed bool signals []*sequentialSignalChannelData } func (sh *sequentialSignalHandler) DeliverSignal(intf, name string, signal *Signal) { sh.mu.RLock() defer sh.mu.RUnlock() if sh.closed { return } for _, scd := range sh.signals { scd.deliver(signal) } } func (sh *sequentialSignalHandler) Terminate() { sh.mu.Lock() defer sh.mu.Unlock() if sh.closed { return } for _, scd := range sh.signals { scd.close() close(scd.ch) } sh.closed = true sh.signals = nil } func (sh *sequentialSignalHandler) AddSignal(ch chan<- *Signal) { sh.mu.Lock() defer sh.mu.Unlock() if sh.closed { return } sh.signals = append(sh.signals, newSequentialSignalChannelData(ch)) } func (sh *sequentialSignalHandler) RemoveSignal(ch chan<- *Signal) { sh.mu.Lock() defer sh.mu.Unlock() if sh.closed { return } for i := len(sh.signals) - 1; i >= 0; i-- { if ch == sh.signals[i].ch { sh.signals[i].close() copy(sh.signals[i:], sh.signals[i+1:]) sh.signals[len(sh.signals)-1] = nil sh.signals = sh.signals[:len(sh.signals)-1] } } } type sequentialSignalChannelData struct { ch chan<- *Signal in chan *Signal done chan struct{} } func newSequentialSignalChannelData(ch chan<- *Signal) *sequentialSignalChannelData { scd := &sequentialSignalChannelData{ ch: ch,<|fim▁hole|> in: make(chan *Signal), done: make(chan struct{}), } go scd.bufferSignals() return scd } func (scd *sequentialSignalChannelData) bufferSignals() { defer close(scd.done) // Ensure that signals are delivered to scd.ch in the same // order they are received from scd.in. var queue []*Signal for { if len(queue) == 0 { signal, ok := <- scd.in if !ok { return } queue = append(queue, signal) } select { case scd.ch <- queue[0]: copy(queue, queue[1:]) queue[len(queue)-1] = nil queue = queue[:len(queue)-1] case signal, ok := <-scd.in: if !ok { return } queue = append(queue, signal) } } } func (scd *sequentialSignalChannelData) deliver(signal *Signal) { scd.in <- signal } func (scd *sequentialSignalChannelData) close() { close(scd.in) // Ensure that bufferSignals() has exited and won't attempt // any future sends on scd.ch <-scd.done }<|fim▁end|>
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian<|fim▁hole|>extern crate build; fn main() { build::link("xinput9_1_0", true) }<|fim▁end|>
// Licensed under the MIT License <LICENSE.md>
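The one-line build script above leans on a helper crate to emit the linker directive; the same effect can be had with no dependencies by printing Cargo's `rustc-link-lib` instruction directly. What the helper's `true` flag selects (for example static versus dynamic linking) is a detail of that crate and is not assumed here.

// build.rs — dependency-free sketch: ask Cargo to link the system library.
fn main() {
    // Cargo parses `cargo:`-prefixed lines from a build script's stdout.
    println!("cargo:rustc-link-lib=xinput9_1_0");
}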
<|file_name|>test_settings.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------- # # This file is part of pylibgimpplugin. # # Copyright (C) 2014 khalim19 <[email protected]> # # pylibgimpplugin is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pylibgimpplugin is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pylibgimpplugin. If not, see <http://www.gnu.org/licenses/>. # #------------------------------------------------------------------------------- #=============================================================================== from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals from __future__ import division str = unicode #=============================================================================== import errno from StringIO import StringIO import json import unittest import gimpenums from ..lib import mock from . import gimpmocks from .. import settings from .. import libfiles #=============================================================================== LIB_NAME = '.'.join(__name__.split('.')[:-2]) #=============================================================================== class MockStringIO(StringIO): def read(self): return self.getvalue() class MockGuiWidget(object): def __init__(self, value): self.value = value self.enabled = True self.visible = True class MockSettingPresenter(settings.SettingPresenter): @property def value(self): return self._element.value @value.setter def value(self, val): self._element.value = val @property def enabled(self): return self._element.enabled @enabled.setter def enabled(self, val): self._element.enabled = val @property def visible(self): return self._element.visible @visible.setter def visible(self, val): self._element.visible = val def connect_event(self, event_func, *event_args): pass def set_tooltip(self): pass class MockSettingPresenterContainer(settings.SettingPresenterContainer): def _gui_on_element_value_change(self, presenter): self._on_element_value_change(presenter) def _gui_on_element_value_change_streamline(self, presenter): self._on_element_value_change(presenter) class SettingContainerTest(settings.SettingContainer): def _create_settings(self): self._add(settings.StringSetting('file_extension', "")) self._add(settings.BoolSetting('ignore_invisible', False)) self._add( settings.EnumSetting( 'overwrite_mode', 'rename_new', [('replace', "Replace"), ('skip', "Skip"), ('rename_new', "Rename new file"), ('rename_existing', "Rename existing file")]) ) self['file_extension'].set_streamline_func(streamline_file_extension, self['ignore_invisible'])<|fim▁hole|> self['overwrite_mode'].set_streamline_func(streamline_overwrite_mode, self['ignore_invisible'], self['file_extension']) def streamline_file_extension(file_extension, ignore_invisible): if ignore_invisible.value: file_extension.value = "png" file_extension.ui_enabled = False else: file_extension.value = "jpg" file_extension.ui_enabled = True def streamline_overwrite_mode(overwrite_mode, ignore_invisible, file_extension): 
if ignore_invisible.value: overwrite_mode.value = overwrite_mode.options['skip'] file_extension.error_messages['custom'] = "custom error message" else: overwrite_mode.value = overwrite_mode.options['replace'] file_extension.error_messages['custom'] = "different custom error message" #=============================================================================== class TestSetting(unittest.TestCase): def setUp(self): self.setting = settings.Setting('file_extension', "") def test_changed_attributes(self): for attr, val in [('value', "png"), ('ui_enabled', False), ('ui_visible', True)]: setattr(self.setting, attr, val) for attr in ['value', 'ui_enabled', 'ui_visible']: self.assertTrue(attr in self.setting.changed_attributes, msg=("'" + attr + "' not in " + str(self.setting.changed_attributes))) def test_can_be_registered_to_pdb(self): self.setting.gimp_pdb_type = gimpenums.PDB_INT32 self.assertEqual(self.setting.can_be_registered_to_pdb, True) self.setting.gimp_pdb_type = None self.assertEqual(self.setting.can_be_registered_to_pdb, False) with self.assertRaises(ValueError): self.setting.gimp_pdb_type = None self.setting.can_be_registered_to_pdb = True def test_reset(self): setting = settings.Setting('file_extension', "") setting.value = "png" setting.reset() self.assertEqual(setting.value, "") def test_set_remove_streamline_func(self): with self.assertRaises(TypeError): self.setting.remove_streamline_func() with self.assertRaises(TypeError): self.setting.set_streamline_func(None) with self.assertRaises(TypeError): self.setting.set_streamline_func("this is not a function") def test_invalid_streamline(self): with self.assertRaises(TypeError): self.setting.streamline() def test_can_streamline(self): self.setting.set_streamline_func(streamline_file_extension) self.assertTrue(self.setting.can_streamline) self.setting.remove_streamline_func() self.assertFalse(self.setting.can_streamline) def test_streamline(self): ignore_invisible = settings.BoolSetting('ignore_invisible', False) self.setting.value = "gif" self.setting.set_streamline_func(streamline_file_extension, ignore_invisible) changed_settings = self.setting.streamline() self.assertTrue(self.setting in changed_settings) self.assertTrue('ui_enabled' in changed_settings[self.setting]) self.assertTrue('value' in changed_settings[self.setting]) self.assertEqual(self.setting.ui_enabled, True) self.assertEqual(self.setting.value, "jpg") def test_streamline_force(self): ignore_invisible = settings.BoolSetting('ignore_invisible', False) self.setting.set_streamline_func(streamline_file_extension, ignore_invisible) changed_settings = self.setting.streamline() self.assertEqual({}, changed_settings) changed_settings = self.setting.streamline(force=True) self.assertTrue(self.setting in changed_settings) class TestIntSetting(unittest.TestCase): def setUp(self): self.setting = settings.IntSetting('count', 0) self.setting.min_value = 0 self.setting.max_value = 100 def test_below_min(self): with self.assertRaises(settings.SettingValueError): self.setting.value = -5 def test_above_max(self): with self.assertRaises(settings.SettingValueError): self.setting.value = 200 class TestFloatSetting(unittest.TestCase): def setUp(self): self.setting = settings.FloatSetting('clip_percent', 0.0) self.setting.min_value = 0.0 self.setting.max_value = 100.0 def test_below_min(self): with self.assertRaises(settings.SettingValueError): self.setting.value = -5.0 try: self.setting.value = 0.0 except settings.SettingValueError: self.fail("`SettingValueError` should not be raised") 
def test_above_max(self): with self.assertRaises(settings.SettingValueError): self.setting.value = 200.0 try: self.setting.value = 100.0 except settings.SettingValueError: self.fail("`SettingValueError` should not be raised") class TestEnumSetting(unittest.TestCase): def setUp(self): self.setting_display_name = "Overwrite mode (non-interactive only)" self.setting = settings.EnumSetting( 'overwrite_mode', 'replace', [('skip', "Skip"), ('replace', "Replace")]) self.setting.display_name = self.setting_display_name def test_explicit_values(self): setting = settings.EnumSetting( 'overwrite_mode', 'replace', [('skip', "Skip", 5), ('replace', "Replace", 6)]) self.assertEqual(setting.options['skip'], 5) self.assertEqual(setting.options['replace'], 6) with self.assertRaises(ValueError): settings.EnumSetting( 'overwrite_mode', 'replace', [('skip', "Skip", 4), ('replace', "Replace")]) with self.assertRaises(ValueError): settings.EnumSetting( 'overwrite_mode', 'replace', [('skip', "Skip", 4), ('replace', "Replace", 4)]) def test_invalid_default_value(self): with self.assertRaises(ValueError): settings.EnumSetting( 'overwrite_mode', 'invalid_default_value', [('skip', "Skip"), ('replace', "Replace")]) def test_set_invalid_option(self): with self.assertRaises(settings.SettingValueError): self.setting.value = 4 with self.assertRaises(settings.SettingValueError): self.setting.value = -1 def test_get_invalid_option(self): with self.assertRaises(KeyError): self.setting.options['invalid_option'] def test_display_name(self): self.assertEqual(self.setting.display_name, self.setting_display_name) def test_short_description(self): self.assertEqual(self.setting.short_description, self.setting_display_name + " { Skip (0), Replace (1) }") def test_get_option_display_names_and_values(self): option_display_names_and_values = self.setting.get_option_display_names_and_values() self.assertEqual(option_display_names_and_values, ["Skip", 0, "Replace", 1]) class TestImageSetting(unittest.TestCase): def setUp(self): self.setting = settings.ImageSetting('image', None) @mock.patch(LIB_NAME + '.settings.pdb', new=gimpmocks.MockPDB()) def test_invalid_image(self): pdb = gimpmocks.MockPDB() image = pdb.gimp_image_new(2, 2, gimpenums.RGB) pdb.gimp_image_delete(image) with self.assertRaises(settings.SettingValueError): self.setting.value = image class TestFileExtensionSetting(unittest.TestCase): def setUp(self): self.setting = settings.FileExtensionSetting('file_ext', "png") def test_custom_error_message(self): self.setting.error_messages[libfiles.FileExtensionValidator.IS_EMPTY] = "My Custom Message" try: self.setting.value = "" except settings.SettingValueError as e: self.assertEqual(e.message, "My Custom Message") #=============================================================================== class TestSettingContainer(unittest.TestCase): def setUp(self): self.settings = SettingContainerTest() def test_get_setting_invalid_key(self): with self.assertRaises(KeyError): self.settings['invalid_key'] def test_streamline(self): self.settings.streamline(force=True) self.assertEqual(self.settings['file_extension'].value, "jpg") self.assertEqual(self.settings['file_extension'].ui_enabled, True) self.assertEqual(self.settings['overwrite_mode'].value, self.settings['overwrite_mode'].options['replace']) def test_reset(self): self.settings['overwrite_mode'].value = self.settings['overwrite_mode'].options['rename_new'] self.settings['file_extension'].value = "jpg" self.settings['file_extension'].can_be_reset_by_container = False 
self.settings.reset() self.assertEqual(self.settings['overwrite_mode'].value, self.settings['overwrite_mode'].default_value) self.assertNotEqual(self.settings['file_extension'].value, self.settings['file_extension'].default_value) self.assertEqual(self.settings['file_extension'].value, "jpg") #=============================================================================== class TestSettingPresenterContainer(unittest.TestCase): def setUp(self): self.settings = SettingContainerTest() self.element = MockGuiWidget("") self.setting_presenter = MockSettingPresenter(self.settings['file_extension'], self.element) self.presenters = MockSettingPresenterContainer() self.presenters.add(self.setting_presenter) self.presenters.add(MockSettingPresenter(self.settings['overwrite_mode'], MockGuiWidget(self.settings['overwrite_mode'].options['skip']))) self.presenters.add(MockSettingPresenter(self.settings['ignore_invisible'], MockGuiWidget(False))) def test_assign_setting_values_to_elements(self): self.settings['file_extension'].value = "png" self.settings['ignore_invisible'].value = True self.presenters.assign_setting_values_to_elements() self.assertEqual(self.presenters[self.settings['file_extension']].value, "png") self.assertEqual(self.presenters[self.settings['file_extension']].enabled, False) self.assertEqual(self.presenters[self.settings['ignore_invisible']].value, True) def test_assign_element_values_to_settings_with_streamline(self): self.presenters[self.settings['file_extension']].value = "jpg" self.presenters[self.settings['ignore_invisible']].value = True self.presenters.assign_element_values_to_settings() self.assertEqual(self.settings['file_extension'].value, "png") self.assertEqual(self.settings['file_extension'].ui_enabled, False) def test_assign_element_values_to_settings_no_streamline(self): # `value_changed_signal` is None, so no event handlers are invoked. 
self.presenters.connect_value_changed_events() self.presenters[self.settings['file_extension']].value = "jpg" self.presenters[self.settings['ignore_invisible']].value = True self.presenters.assign_element_values_to_settings() self.assertEqual(self.settings['file_extension'].value, "jpg") self.assertEqual(self.settings['file_extension'].ui_enabled, True) #=============================================================================== class TestShelfSettingStream(unittest.TestCase): @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def setUp(self): self.prefix = 'prefix' self.stream = settings.GimpShelfSettingStream(self.prefix) self.settings = SettingContainerTest() @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_write(self): self.settings['file_extension'].value = "png" self.settings['ignore_invisible'].value = True self.stream.write(self.settings) self.assertEqual(settings.gimpshelf.shelf[self.prefix + 'file_extension'], "png") self.assertEqual(settings.gimpshelf.shelf[self.prefix + 'ignore_invisible'], True) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_read(self): settings.gimpshelf.shelf[self.prefix + 'file_extension'] = "png" settings.gimpshelf.shelf[self.prefix + 'ignore_invisible'] = True self.stream.read([self.settings['file_extension'], self.settings['ignore_invisible']]) self.assertEqual(self.settings['file_extension'].value, "png") self.assertEqual(self.settings['ignore_invisible'].value, True) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_read_settings_not_found(self): with self.assertRaises(settings.SettingsNotFoundInStreamError): self.stream.read(self.settings) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_read_invalid_setting_value(self): setting_with_invalid_value = settings.IntSetting('int', -1) setting_with_invalid_value.min_value = 0 self.stream.write([setting_with_invalid_value]) self.stream.read([setting_with_invalid_value]) self.assertEqual(setting_with_invalid_value.value, setting_with_invalid_value.default_value) @mock.patch('__builtin__.open') class TestJSONFileSettingStream(unittest.TestCase): def setUp(self): self.stream = settings.JSONFileSettingStream("/test/file") self.settings = SettingContainerTest() def test_write(self, mock_file): self.settings['file_extension'].value = "jpg" self.settings['ignore_invisible'].value = True mock_file.return_value.__enter__.return_value = MockStringIO() file_ = mock_file.return_value.__enter__.return_value self.stream.write(self.settings) settings = json.loads(file_.read()) self.assertEqual(self.settings['file_extension'].value, "jpg") self.assertEqual(self.settings['ignore_invisible'].value, True) def test_read(self, mock_file): self.settings['file_extension'].value = "jpg" self.settings['ignore_invisible'].value = True mock_file.return_value.__enter__.return_value = MockStringIO() self.stream.write(self.settings) self.stream.read(self.settings) self.assertEqual(self.settings['file_extension'].value, "jpg") self.assertEqual(self.settings['ignore_invisible'].value, True) def test_write_ioerror_oserror(self, mock_file): mock_file.side_effect = IOError("Whatever other I/O error it could be") with self.assertRaises(settings.SettingStreamWriteError): self.stream.write(self.settings) mock_file.side_effect = OSError("Permission denied or whatever other OS error it could be") with 
self.assertRaises(settings.SettingStreamWriteError): self.stream.write(self.settings) def test_read_ioerror_oserror(self, mock_file): mock_file.side_effect = IOError("File not found or whatever other I/O error it could be") with self.assertRaises(settings.SettingStreamReadError): self.stream.read(self.settings) mock_file.side_effect = OSError("Permission denied or whatever other OS error it could be") with self.assertRaises(settings.SettingStreamReadError): self.stream.read(self.settings) def test_read_invalid_file_extension(self, mock_file): mock_file.side_effect = ValueError("Invalid file format; must be JSON") with self.assertRaises(settings.SettingStreamInvalidFormatError): self.stream.read(self.settings) def test_read_invalid_setting_value(self, mock_file): mock_file.return_value.__enter__.return_value = MockStringIO() setting_with_invalid_value = settings.IntSetting('int', -1) setting_with_invalid_value.min_value = 0 self.stream.write([setting_with_invalid_value]) self.stream.read([setting_with_invalid_value]) self.assertEqual(setting_with_invalid_value.value, setting_with_invalid_value.default_value) def test_read_settings_not_found(self, mock_file): mock_file.return_value.__enter__.return_value = MockStringIO() self.stream.write([settings.IntSetting('int', -1)]) with self.assertRaises(settings.SettingsNotFoundInStreamError): self.stream.read(self.settings) #=============================================================================== @mock.patch('__builtin__.open') class TestSettingPersistor(unittest.TestCase): @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def setUp(self): self.settings = SettingContainerTest() self.first_stream = settings.GimpShelfSettingStream('') self.second_stream = settings.JSONFileSettingStream('filename') self.setting_persistor = settings.SettingPersistor([self.first_stream, self.second_stream], [self.first_stream, self.second_stream]) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_load_save(self, mock_file): mock_file.return_value.__enter__.return_value = MockStringIO() self.settings['file_extension'].value = "png" self.settings['ignore_invisible'].value = True status = self.setting_persistor.save(self.settings) self.assertEqual(status, settings.SettingPersistor.SUCCESS) self.settings['file_extension'].value = "jpg" self.settings['ignore_invisible'].value = False status = self.setting_persistor.load(self.settings) self.assertEqual(status, settings.SettingPersistor.SUCCESS) self.assertEqual(self.settings['file_extension'].value, "png") self.assertEqual(self.settings['ignore_invisible'].value, True) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_load_combine_settings_from_multiple_streams(self, mock_file): mock_file.return_value.__enter__.return_value = MockStringIO() self.settings['file_extension'].value = "png" self.settings['ignore_invisible'].value = True self.first_stream.write([self.settings['file_extension']]) self.settings['file_extension'].value = "jpg" self.second_stream.write([self.settings['ignore_invisible'], self.settings['file_extension']]) self.settings['file_extension'].value = "gif" self.settings['ignore_invisible'].value = False self.setting_persistor.load(self.settings) self.assertEqual(self.settings['file_extension'].value, "png") self.assertEqual(self.settings['ignore_invisible'].value, True) for setting in self.settings: if setting not in [self.settings['file_extension'], self.settings['ignore_invisible']]: 
self.assertEqual(setting.value, setting.default_value) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_load_settings_file_not_found(self, mock_file): mock_file.return_value.__enter__.return_value = MockStringIO() mock_file.side_effect = IOError("File not found") mock_file.side_effect.errno = errno.ENOENT status = self.setting_persistor.load(self.settings) self.assertEqual(status, settings.SettingPersistor.NOT_ALL_SETTINGS_FOUND) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_load_settings_not_found(self, mock_file): mock_file.return_value.__enter__.return_value = MockStringIO() self.first_stream.write([self.settings['ignore_invisible']]) self.second_stream.write([self.settings['file_extension'], self.settings['ignore_invisible']]) status = self.setting_persistor.load([self.settings['overwrite_mode']]) self.assertEqual(status, settings.SettingPersistor.NOT_ALL_SETTINGS_FOUND) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_load_read_fail(self, mock_file): mock_file.return_value.__enter__.return_value = MockStringIO() status = self.setting_persistor.load(self.settings) self.assertEqual(status, settings.SettingPersistor.READ_FAIL) mock_file.side_effect = IOError() status = self.setting_persistor.load(self.settings) self.assertEqual(status, settings.SettingPersistor.READ_FAIL) mock_file.side_effect = OSError() status = self.setting_persistor.load(self.settings) self.assertEqual(status, settings.SettingPersistor.READ_FAIL) @mock.patch(LIB_NAME + '.settings.gimpshelf.shelf', new=gimpmocks.MockGimpShelf()) def test_save_write_fail(self, mock_file): mock_file.return_value.__enter__.return_value = MockStringIO() mock_file.side_effect = IOError() status = self.setting_persistor.save(self.settings) self.assertEqual(status, settings.SettingPersistor.WRITE_FAIL) mock_file.side_effect = OSError() status = self.setting_persistor.save(self.settings) self.assertEqual(status, settings.SettingPersistor.WRITE_FAIL)<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import { combineReducers } from 'redux' import userInfo from './userInfo' import userFeed from './userFeed' import popularFeed from './popularFeed' <|fim▁hole|>})<|fim▁end|>
export default combineReducers({
  userInfo,
  userFeed,
  popularFeed
<|file_name|>django-admin.py<|end_file_name|><|fim▁begin|>#!/home/jojoriveraa/Dropbox/Capacitación/Platzi/Python-Django/NFCow/venv/bin/python3 from django.core import management<|fim▁hole|>if __name__ == "__main__": management.execute_from_command_line()<|fim▁end|>
<|file_name|>sha-text.js<|end_file_name|><|fim▁begin|><|fim▁hole|>// generate a hash from string var crypto = require('crypto'), text = 'hello bob', key = 'mysecret key' // create hahs var hash = crypto.createHmac('sha512', key) hash.update(text) var value = hash.digest('hex') // print result console.log(value);<|fim▁end|>
<|file_name|>DataDownloadCqlBuilderSpec.js<|end_file_name|><|fim▁begin|>describe("Portal.filter.combiner.DataDownloadCqlBuilder", function() { var builder; beforeEach(function() { var filters =[ { constructor: Portal.filter.GeometryFilter, // Is Geometry filter isVisualised: returns(true), hasValue: returns(true), getCql: returns('cql1') }, { isVisualised: returns(false), // Not visualised hasValue: returns(true), getCql: returns('cql2') }, { isVisualised: returns(true), hasValue: returns(false), // No value getCql: returns('cql3') }, { isVisualised: returns(true), hasValue: returns(true), getCql: returns('cql4') } ];<|fim▁hole|> filters: filters }); }); describe('buildCql', function() { it('returns correct CQL', function() { expect(builder.buildCql()).toBe('cql1 AND cql2 AND cql4'); }); }); });<|fim▁end|>
builder = new Portal.filter.combiner.DataDownloadCqlBuilder({
<|file_name|>price_import.py<|end_file_name|><|fim▁begin|>from decimal import Decimal from django.core.management.base import BaseCommand from openpyxl import load_workbook from contracts.models import PriceName, PriceCoast from directory.models import Researches class Command(BaseCommand): def add_arguments(self, parser): """ :param path - файл с картами пациентов + диагноз Д-учета """<|fim▁hole|> """ Испорт цен услуг Если услуга(id) существует записать в новый ф-л уже существующие, иначе создать новую запись :param args: :param kwargs: :return: """ fp = kwargs["path"] self.stdout.write("Path: " + fp) wb = load_workbook(filename=fp) ws = wb[wb.sheetnames[0]] starts = False identify = 0 price_code = 0 coast = 0 for row in ws.rows: cells = [str(x.value) for x in row] if not starts: if "id" in cells and "код_прайс" in cells and "цена" in cells: starts = True identify = cells.index("id") price_code = cells.index("код_прайс") coast = cells.index("цена") else: price_obj = PriceName.objects.filter(pk=int(cells[price_code])).first() research_obj = Researches.objects.filter(pk=int(cells[identify])).first() if cells[coast]: coast_value = Decimal(cells[coast]) if price_obj and research_obj: PriceCoast.objects.update_or_create(price_name=price_obj, research=research_obj, defaults={'coast': coast_value})<|fim▁end|>
parser.add_argument('path', type=str) def handle(self, *args, **kwargs):
<|file_name|>gcp_resourcemanager_project.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2017 Google # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # ---------------------------------------------------------------------------- # # *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** # # ---------------------------------------------------------------------------- # # This file is automatically generated by Magic Modules and manual # changes will be clobbered when the file is regenerated. # # Please read more about how to change this file at # https://www.github.com/GoogleCloudPlatform/magic-modules # # ---------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function __metaclass__ = type ################################################################################ # Documentation ################################################################################ ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gcp_resourcemanager_project description: - Represents a GCP Project. A project is a container for ACLs, APIs, App Engine Apps, VMs, and other Google Cloud Platform resources. short_description: Creates a GCP Project version_added: '2.8' author: Google Inc. (@googlecloudplatform) requirements: - python >= 2.6 - requests >= 2.18.4 - google-auth >= 1.3.0 options: state: description: - Whether the given object should exist in GCP choices: - present - absent default: present type: str name: description: - 'The user-assigned display name of the Project. It must be 4 to 30 characters. Allowed characters are: lowercase and uppercase letters, numbers, hyphen, single-quote, double-quote, space, and exclamation point.' required: false type: str labels: description: - The labels associated with this Project. - 'Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`.' - Label values must be between 0 and 63 characters long and must conform to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. - No more than 256 labels can be associated with a given resource. - Clients should store labels in a representation such as JSON that does not depend on specific characters being disallowed . required: false type: dict parent: description: - A parent organization. required: false type: dict suboptions: type: description: - Must be organization. required: false type: str id: description: - Id of the organization. required: false type: str id: description: - The unique, user-assigned ID of the Project. It must be 6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. - Trailing hyphens are prohibited. required: true type: str project: description: - The Google Cloud Platform project to use. type: str auth_kind: description: - The type of credential used. type: str required: true choices: - application - machineaccount - serviceaccount service_account_contents: description: - The contents of a Service Account JSON file, either in a dictionary or as a JSON string that represents it. type: jsonarg service_account_file:<|fim▁hole|> - The path of a Service Account JSON file if serviceaccount is selected as type. 
type: path service_account_email: description: - An optional service account email address if machineaccount is selected and the user does not wish to use the default email. type: str scopes: description: - Array of scopes to be used type: list env_type: description: - Specifies which Ansible environment you're running this module within. - This should not be set unless you know what you're doing. - This only alters the User Agent string for any API requests. type: str ''' EXAMPLES = ''' - name: create a project gcp_resourcemanager_project: name: My Sample Project id: alextest-{{ 10000000000 | random }} auth_kind: serviceaccount service_account_file: "/tmp/auth.pem" parent: type: organization id: 636173955921 state: present ''' RETURN = ''' number: description: - Number uniquely identifying the project. returned: success type: int lifecycleState: description: - The Project lifecycle state. returned: success type: str name: description: - 'The user-assigned display name of the Project. It must be 4 to 30 characters. Allowed characters are: lowercase and uppercase letters, numbers, hyphen, single-quote, double-quote, space, and exclamation point.' returned: success type: str createTime: description: - Time of creation. returned: success type: str labels: description: - The labels associated with this Project. - 'Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`.' - Label values must be between 0 and 63 characters long and must conform to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. - No more than 256 labels can be associated with a given resource. - Clients should store labels in a representation such as JSON that does not depend on specific characters being disallowed . returned: success type: dict parent: description: - A parent organization. returned: success type: complex contains: type: description: - Must be organization. returned: success type: str id: description: - Id of the organization. returned: success type: str id: description: - The unique, user-assigned ID of the Project. It must be 6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. - Trailing hyphens are prohibited. 
returned: success type: str ''' ################################################################################ # Imports ################################################################################ from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict import json import time ################################################################################ # Main ################################################################################ def main(): """Main function""" module = GcpModule( argument_spec=dict( state=dict(default='present', choices=['present', 'absent'], type='str'), name=dict(type='str'), labels=dict(type='dict'), parent=dict(type='dict', options=dict(type=dict(type='str'), id=dict(type='str'))), id=dict(required=True, type='str'), ) ) if not module.params['scopes']: module.params['scopes'] = ['https://www.googleapis.com/auth/cloud-platform'] state = module.params['state'] fetch = fetch_resource(module, self_link(module)) changed = False if fetch: if state == 'present': if is_different(module, fetch): update(module, self_link(module)) fetch = fetch_resource(module, self_link(module)) changed = True else: delete(module, self_link(module)) fetch = {} changed = True else: if state == 'present': fetch = create(module, collection(module)) changed = True else: fetch = {} fetch.update({'changed': changed}) module.exit_json(**fetch) def create(module, link): auth = GcpSession(module, 'resourcemanager') return wait_for_operation(module, auth.post(link, resource_to_request(module))) def update(module, link): auth = GcpSession(module, 'resourcemanager') return wait_for_operation(module, auth.put(link, resource_to_request(module))) def delete(module, link): auth = GcpSession(module, 'resourcemanager') return wait_for_operation(module, auth.delete(link)) def resource_to_request(module): request = { u'projectId': module.params.get('id'), u'name': module.params.get('name'), u'labels': module.params.get('labels'), u'parent': ProjectParent(module.params.get('parent', {}), module).to_request(), } return_vals = {} for k, v in request.items(): if v or v is False: return_vals[k] = v return return_vals def fetch_resource(module, link, allow_not_found=True): auth = GcpSession(module, 'resourcemanager') return return_if_object(module, auth.get(link), allow_not_found) def self_link(module): return "https://cloudresourcemanager.googleapis.com/v1/projects/{id}".format(**module.params) def collection(module): return "https://cloudresourcemanager.googleapis.com/v1/projects".format(**module.params) def return_if_object(module, response, allow_not_found=False): # If not found, return nothing. if allow_not_found and response.status_code == 404: return None # If no content, return nothing. if response.status_code == 204: return None # SQL only: return on 403 if not exist if allow_not_found and response.status_code == 403: return None try: result = response.json() except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst: module.fail_json(msg="Invalid JSON response with error: %s" % inst) if navigate_hash(result, ['error', 'message']): module.fail_json(msg=navigate_hash(result, ['error', 'message'])) return result def is_different(module, response): request = resource_to_request(module) response = response_to_hash(module, response) # Remove all output-only from response. 
response_vals = {} for k, v in response.items(): if k in request: response_vals[k] = v request_vals = {} for k, v in request.items(): if k in response: request_vals[k] = v return GcpRequest(request_vals) != GcpRequest(response_vals) # Remove unnecessary properties from the response. # This is for doing comparisons with Ansible's current parameters. def response_to_hash(module, response): return { u'projectNumber': response.get(u'projectNumber'), u'lifecycleState': response.get(u'lifecycleState'), u'name': response.get(u'name'), u'createTime': response.get(u'createTime'), u'labels': response.get(u'labels'), u'parent': ProjectParent(response.get(u'parent', {}), module).from_response(), } def async_op_url(module, extra_data=None): if extra_data is None: extra_data = {} url = "https://cloudresourcemanager.googleapis.com/v1/{op_id}" combined = extra_data.copy() combined.update(module.params) return url.format(**combined) def wait_for_operation(module, response): op_result = return_if_object(module, response) if op_result is None: return {} status = navigate_hash(op_result, ['done']) wait_done = wait_for_completion(status, op_result, module) raise_if_errors(wait_done, ['error'], module) return navigate_hash(wait_done, ['response']) def wait_for_completion(status, op_result, module): op_id = navigate_hash(op_result, ['name']) op_uri = async_op_url(module, {'op_id': op_id}) while not status: raise_if_errors(op_result, ['error'], module) time.sleep(1.0) op_result = fetch_resource(module, op_uri, False) status = navigate_hash(op_result, ['done']) return op_result def raise_if_errors(response, err_path, module): errors = navigate_hash(response, err_path) if errors is not None: module.fail_json(msg=errors) class ProjectParent(object): def __init__(self, request, module): self.module = module if request: self.request = request else: self.request = {} def to_request(self): return remove_nones_from_dict({u'type': self.request.get('type'), u'id': self.request.get('id')}) def from_response(self): return remove_nones_from_dict({u'type': self.request.get(u'type'), u'id': self.request.get(u'id')}) if __name__ == '__main__': main()<|fim▁end|>
description:
<|file_name|>ConnectionPoint.py<|end_file_name|><|fim▁begin|>import Sea from Connection import Connection class ConnectionPoint(Connection): """ Class for point connections. """ def __init__(self, obj, system, components): Connection.__init__(self, obj, system, components) #obj.Sort = 'Point' def updateComponents(self, obj):<|fim▁hole|> if any([item.Vertexes for item in commons]): """ There is indeed a point connection. """ obj.Proxy.model.components = obj.Components obj.updateCouplings()<|fim▁end|>
connections = Sea.actions.connection.ShapeConnection([item.Shape for item in obj.Components]) commons = connections.commons()
<|file_name|>cpp_code_generator.rs<|end_file_name|><|fim▁begin|>use cpp_ffi_data::{QtSlotWrapper, CppIndirectionChange, CppAndFfiMethod, CppFfiArgumentMeaning, CppFfiHeaderData, CppFfiType, CppFieldAccessorType, CppFfiMethodKind}; use cpp_method::ReturnValueAllocationPlace; use cpp_type::{CppTypeIndirection, CppTypeBase, CppType}; use common::errors::{Result, ChainErr, unexpected}; use common::file_utils::{PathBufWithAdded, create_dir_all, create_file, path_to_str}; use common::string_utils::JoinWithSeparator; use common::utils::MapIfOk; use common::utils::get_command_output; use std::path::PathBuf; use std::iter::once; use std::process::Command; /// Generates C++ code for the C wrapper library. pub struct CppCodeGenerator { /// Library name lib_name: String, /// Uppercase library name (for optimization) lib_name_upper: String, /// Path to the directory where the library is generated lib_path: ::std::path::PathBuf, } impl CppCodeGenerator { /// Creates a generator for a library. /// lib_name: library name /// lib_path: path to the directory where the library is generated pub fn new(lib_name: String, lib_path: ::std::path::PathBuf) -> Self { CppCodeGenerator { lib_name: lib_name.clone(), lib_name_upper: lib_name.to_uppercase(), lib_path: lib_path, } } /// Generates function name, return type and arguments list /// as it appears in both function declaration and implementation. fn function_signature(&self, method: &CppAndFfiMethod) -> Result<String> { let mut arg_texts = Vec::new(); for arg in &method.c_signature.arguments { arg_texts.push(arg.to_cpp_code()?); } let name_with_args = format!("{}({})", method.c_name, arg_texts.join(", "));<|fim▁hole|> let r = if let CppTypeBase::FunctionPointer(..) = return_type.base { return_type.to_cpp_code(Some(&name_with_args))? } else { format!("{} {}", return_type.to_cpp_code(None)?, name_with_args) }; Ok(r) } /// Generates method declaration for the header. fn function_declaration(&self, method: &CppAndFfiMethod) -> Result<String> { Ok(format!("{}_EXPORT {};\n", self.lib_name_upper, self.function_signature(method)?)) } /// Generates code for a Qt slot wrapper fn qt_slot_wrapper(&self, wrapper: &QtSlotWrapper) -> Result<String> { let func_type = CppType { base: CppTypeBase::FunctionPointer(wrapper.function_type.clone()), indirection: CppTypeIndirection::None, is_const: false, is_const2: false, }; let method_args = wrapper .arguments .iter() .enumerate() .map_if_ok(|(num, t)| -> Result<_> { Ok(format!("{} arg{}", t.original_type.to_cpp_code(None)?, num)) })? .join(", "); let func_args = once("m_data".to_string()) .chain(wrapper .arguments .iter() .enumerate() .map_if_ok(|(num, t)| self.convert_type_to_ffi(t, format!("arg{}", num)))?) 
.join(", "); Ok(format!(include_str!("../templates/c_lib/qt_slot_wrapper.h"), class_name = &wrapper.class_name, func_arg = func_type.to_cpp_code(Some("func"))?, func_field = func_type.to_cpp_code(Some("m_func"))?, method_args = method_args, func_args = func_args)) } /// Generates code that wraps `expression` of type `type1.original_type` and /// converts it to type `type1.ffi_type` fn convert_type_to_ffi(&self, type1: &CppFfiType, expression: String) -> Result<String> { Ok(match type1.conversion { CppIndirectionChange::NoChange => expression, CppIndirectionChange::ValueToPointer => { format!("new {}({})", type1.original_type.base.to_cpp_code(None)?, expression) } CppIndirectionChange::ReferenceToPointer => format!("&{}", expression), CppIndirectionChange::QFlagsToUInt => format!("uint({})", expression), }) } /// Wraps `expression` returned by the original C++ method to /// convert it to return type of the FFI method. fn convert_return_type(&self, method: &CppAndFfiMethod, expression: String) -> Result<String> { let mut result = expression; match method.c_signature.return_type.conversion { CppIndirectionChange::NoChange => {} CppIndirectionChange::ValueToPointer => { match method.allocation_place { ReturnValueAllocationPlace::Stack => { return Err(unexpected("stack allocated wrappers are expected to return void").into()); } ReturnValueAllocationPlace::NotApplicable => { return Err(unexpected("ValueToPointer conflicts with NotApplicable").into()); } ReturnValueAllocationPlace::Heap => { // constructors are said to return values in parse result, // but in reality we use `new` which returns a pointer, // so no conversion is necessary for constructors. if !method.cpp_method.is_constructor() { result = format!("new {}({})", method.cpp_method.return_type.base.to_cpp_code(None)?, result); } } } } CppIndirectionChange::ReferenceToPointer => { result = format!("&{}", result); } CppIndirectionChange::QFlagsToUInt => { result = format!("uint({})", result); } } if method.allocation_place == ReturnValueAllocationPlace::Stack && !method.cpp_method.is_constructor() { if let Some(arg) = method .c_signature .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::ReturnValue) { result = format!("new({}) {}({})", arg.name, method.cpp_method.return_type.base.to_cpp_code(None)?, result); } } Ok(result) } /// Generates code for values passed to the original C++ method. fn arguments_values(&self, method: &CppAndFfiMethod) -> Result<String> { let mut filled_arguments = vec![]; for (i, cpp_argument) in method.cpp_method.arguments.iter().enumerate() { if let Some(c_argument) = method .c_signature .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::Argument(i as i8)) { let mut result = c_argument.name.clone(); match c_argument.argument_type.conversion { CppIndirectionChange::ValueToPointer | CppIndirectionChange::ReferenceToPointer => result = format!("*{}", result), CppIndirectionChange::NoChange => {} CppIndirectionChange::QFlagsToUInt => { let type_text = if cpp_argument.argument_type.indirection == CppTypeIndirection::Ref && cpp_argument.argument_type.is_const { let mut fake_type = cpp_argument.argument_type.clone(); fake_type.is_const = false; fake_type.indirection = CppTypeIndirection::None; fake_type.to_cpp_code(None)? } else { cpp_argument.argument_type.to_cpp_code(None)? 
}; result = format!("{}({})", type_text, result); } } filled_arguments.push(result); } else { panic!("Error: no positional argument found\n{:?}", method); } } Ok(filled_arguments.into_iter().join(", ")) } /// Generates code for the value returned by the FFI method. #[cfg_attr(feature="clippy", allow(collapsible_if))] fn returned_expression(&self, method: &CppAndFfiMethod) -> Result<String> { let result = if method.cpp_method.is_destructor() { if let Some(arg) = method .c_signature .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::This) { format!("{}_call_destructor({})", self.lib_name, arg.name) } else { return Err(unexpected("no this arg in destructor").into()); } } else { let mut is_field_accessor = false; let result_without_args = if let Some(info) = method.cpp_method.class_info_if_constructor() { let class_type = &info.class_type; match method.allocation_place { ReturnValueAllocationPlace::Stack => { if let Some(arg) = method .c_signature .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::ReturnValue) { format!("new({}) {}", arg.name, class_type.to_cpp_code()?) } else { return Err(unexpected(format!("return value argument not found\n{:?}", method)) .into()); } } ReturnValueAllocationPlace::Heap => format!("new {}", class_type.to_cpp_code()?), ReturnValueAllocationPlace::NotApplicable => { return Err(unexpected("NotApplicable in constructor").into()); } } } else { let scope_specifier = if let Some(ref class_membership) = method.cpp_method.class_membership { if class_membership.is_static { format!("{}::", class_membership.class_type.to_cpp_code()?) } else { if let Some(arg) = method .c_signature .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::This) { format!("{}->", arg.name) } else { return Err(unexpected("no this arg in non-static method").into()); } } } else { "".to_string() }; let template_args = match method.cpp_method.template_arguments_values { Some(ref args) => { let mut texts = Vec::new(); for arg in args { texts.push(arg.to_cpp_code(None)?); } format!("<{}>", texts.join(", ")) } None => String::new(), }; if let CppFfiMethodKind::FieldAccessor { ref accessor_type, ref field_name, } = method.kind { is_field_accessor = true; if accessor_type == &CppFieldAccessorType::Setter { format!("{}{} = {}", scope_specifier, field_name, self.arguments_values(method)?) } else { format!("{}{}", scope_specifier, field_name) } } else { format!("{}{}{}", scope_specifier, method.cpp_method.name, template_args) } }; if is_field_accessor { result_without_args } else { format!("{}({})", result_without_args, self.arguments_values(method)?) } }; self.convert_return_type(method, result) } /// Generates body of the FFI method implementation. fn source_body(&self, method: &CppAndFfiMethod) -> Result<String> { if method.cpp_method.is_destructor() && method.allocation_place == ReturnValueAllocationPlace::Heap { if let Some(arg) = method .c_signature .arguments .iter() .find(|x| x.meaning == CppFfiArgumentMeaning::This) { Ok(format!("delete {};\n", arg.name)) } else { panic!("Error: no this argument found\n{:?}", method); } } else { Ok(format!("{}{};\n", if method.c_signature.return_type.ffi_type.is_void() { "" } else { "return " }, self.returned_expression(&method)?)) } } /// Generates implementation of the FFI method for the source file. 
fn function_implementation(&self, method: &CppAndFfiMethod) -> Result<String> { Ok(format!("{} {{\n {}}}\n\n", self.function_signature(method)?, self.source_body(&method)?)) } /// Generates main files and directories of the library. pub fn generate_template_files(&self, include_directives: &[PathBuf]) -> Result<()> { let name_upper = self.lib_name.to_uppercase(); let cmakelists_path = self.lib_path.with_added("CMakeLists.txt"); let mut cmakelists_file = create_file(&cmakelists_path)?; cmakelists_file .write(format!(include_str!("../templates/c_lib/CMakeLists.txt"), lib_name_lowercase = &self.lib_name, lib_name_uppercase = name_upper))?; let src_dir = self.lib_path.with_added("src"); create_dir_all(&src_dir)?; let include_dir = self.lib_path.with_added("include"); create_dir_all(&include_dir)?; let exports_file_path = include_dir.with_added(format!("{}_exports.h", &self.lib_name)); let mut exports_file = create_file(&exports_file_path)?; exports_file .write(format!(include_str!("../templates/c_lib/exports.h"), lib_name_uppercase = name_upper))?; let include_directives_code = include_directives .map_if_ok(|d| -> Result<_> { Ok(format!("#include \"{}\"", path_to_str(d)?)) })? .join("\n"); let global_file_path = include_dir.with_added(format!("{}_global.h", &self.lib_name)); let mut global_file = create_file(&global_file_path)?; global_file .write(format!(include_str!("../templates/c_lib/global.h"), lib_name_lowercase = &self.lib_name, lib_name_uppercase = name_upper, include_directives_code = include_directives_code))?; Ok(()) } /// Generates all regular files of the C++ wrapper library pub fn generate_files(&self, data: &[CppFfiHeaderData]) -> Result<()> { self .generate_all_headers_file(data.iter().map(|x| &x.include_file_base_name))?; for item in data { self .generate_one(item) .chain_err(|| "C++ code generator failed")?; } Ok(()) } /// Generates the header file that includes all other headers of the library. fn generate_all_headers_file<'a, I: Iterator<Item = &'a String>>(&self, names: I) -> Result<()> { let mut h_path = self.lib_path.clone(); h_path.push("include"); h_path.push(format!("{}.h", &self.lib_name)); let mut all_header_file = create_file(&h_path)?; all_header_file .write(format!("#ifndef {0}_H\n#define {0}_H\n\n", &self.lib_name_upper))?; for name in names { all_header_file .write(format!("#include \"{}_{}.h\"\n", &self.lib_name, name))?; } all_header_file .write(format!("#endif // {}_H\n", &self.lib_name_upper))?; Ok(()) } /// Generates a header file and a source file for a portion of data /// corresponding to a header file of original C++ library. 
fn generate_one(&self, data: &CppFfiHeaderData) -> Result<()> { let ffi_include_file = format!("{}_{}.h", &self.lib_name, data.include_file_base_name); let cpp_path = self .lib_path .with_added("src") .with_added(format!("{}_{}.cpp", &self.lib_name, data.include_file_base_name)); let h_path = self .lib_path .with_added("include") .with_added(&ffi_include_file); let mut cpp_file = create_file(&cpp_path)?; { let mut h_file = create_file(&h_path)?; cpp_file .write(format!("#include \"{}\"\n\n", ffi_include_file))?; let include_guard_name = ffi_include_file.replace(".", "_").to_uppercase(); h_file .write(format!("#ifndef {}\n#define {}\n\n", include_guard_name, include_guard_name))?; h_file .write(format!("#include \"{}_global.h\"\n\n", &self.lib_name))?; for wrapper in &data.qt_slot_wrappers { h_file.write(self.qt_slot_wrapper(wrapper)?)?; } h_file.write("extern \"C\" {\n\n")?; for method in &data.methods { h_file.write(self.function_declaration(method)?)?; cpp_file.write(self.function_implementation(method)?)?; } h_file.write("\n} // extern \"C\"\n\n")?; h_file .write(format!("#endif // {}\n", include_guard_name))?; } if !data.qt_slot_wrappers.is_empty() { let moc_output = get_command_output(Command::new("moc").arg("-i").arg(&h_path))?; cpp_file .write(format!("// start of MOC generated code\n{}\n// end of MOC generated code\n", moc_output))?; } Ok(()) } } /// Entry about a Rust struct with a buffer that must have the exact same size /// as its corresponding C++ class. This information is required for the C++ program /// that is launched by the build script to determine type sizes and generate `type_sizes.rs`. #[derive(Debug, Clone)] pub struct CppTypeSizeRequest { /// C++ code representing the type. Used as argument to `sizeof`. pub cpp_code: String, /// Name of the constant in `type_sizes.rs`. pub size_const_name: String, } /// Generates a C++ program that determines sizes of target C++ types /// on the current platform and outputs the Rust code for `type_sizes.rs` module /// to the standard output. pub fn generate_cpp_type_size_requester(requests: &[CppTypeSizeRequest], include_directives: &[PathBuf]) -> Result<String> { let mut result = Vec::new(); for dir in include_directives { result.push(format!("#include <{}>\n", path_to_str(dir)?)); } result.push("#include <iostream>\n\nint main() {\n".to_string()); for request in requests { result.push(format!(" std::cout << \"pub const {}: usize = \" << sizeof({}) << \";\\n\";\n", request.size_const_name, request.cpp_code)); } result.push("}\n".to_string()); Ok(result.join("")) }<|fim▁end|>
let return_type = &method.c_signature.return_type.ffi_type;
<|file_name|>HeadRequestArguments.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2010-2021 Contributors to the openHAB project * * See the NOTICE file(s) distributed with this work for additional * information. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 */ package org.openhab.binding.fronius.internal.api; import com.google.gson.annotations.SerializedName; /** * The {@link HeadRequestArguments} is responsible for storing * the "RequestArguments" node from the {@link Head} * * @author Thomas Rokohl - Initial contribution */ public class HeadRequestArguments { @SerializedName("DataCollection") private String dataCollection; @SerializedName("DeviceClass") private String deviceClass; @SerializedName("DeviceId") private String deviceId; @SerializedName("Scope") private String scope; public String getDataCollection() { if (null == dataCollection) { dataCollection = ""; } return dataCollection; } public void setDataCollection(String dataCollection) { this.dataCollection = dataCollection; } public String getDeviceClass() { if (null == deviceClass) { deviceClass = ""; } return deviceClass; } public void setDeviceClass(String deviceClass) { this.deviceClass = deviceClass; } public String getDeviceId() { if (null == deviceId) { deviceId = ""; } return deviceId; } public void setDeviceId(String deviceId) { this.deviceId = deviceId; } public String getScope() {<|fim▁hole|> return scope; } public void setScope(String scope) { this.scope = scope; } }<|fim▁end|>
if (null == scope) { scope = ""; }
<|file_name|>gtest_attribute.rs<|end_file_name|><|fim▁begin|>use proc_macro::TokenStream; use quote::{format_ident, quote, quote_spanned}; use syn::spanned::Spanned; /// The `gtest` macro can be placed on a function to make it into a Gtest unit test, when linked /// into a C++ binary that invokes Gtest. /// /// The `gtest` macro takes two arguments, which are Rust identifiers. The first is the name of the /// test suite and the second is the name of the test, each of which are converted to a string and /// given to Gtest. The name of the test function itself does not matter, and need not be unique /// (it's placed into a unique module based on the Gtest suite + test names. /// /// The test function must have no arguments. The return value must be either `()` or /// `std::result::Result<(), E>`. If another return type is found, the test will fail when run. If /// the return type is a `Result`, then an `Err` is treated as a test failure. /// /// # Examples /// ``` /// #[gtest(MathTest, Addition)] /// fn my_test() { /// expect_eq!(1 + 1, 2); /// } /// ``` /// /// The above adds the function to the Gtest binary as `MathTest.Addtition`: /// ``` /// [ RUN ] MathTest.Addition /// [ OK ] MathTest.Addition (0 ms) /// ``` /// /// A test with a Result return type, and which uses the `?` operator. It will fail if the test /// returns an `Err`, and print the resulting error string: /// ``` /// #[gtest(ResultTest, CheckThingWithResult)] /// fn my_test() -> std::result::Result<(), String> { /// call_thing_with_result()?; /// } /// ``` #[proc_macro_attribute] pub fn gtest(arg_stream: TokenStream, input: TokenStream) -> TokenStream { enum GtestAttributeArgument { TestSuite, TestName, } // Returns a string representation of an identifier argument to the attribute. For example, for // #[gtest(Foo, Bar)], this function would return "Foo" for position 0 and "Bar" for position 1. // If the argument is not a Rust identifier or not present, it returns a compiler error as a // TokenStream to be emitted. fn get_arg_string( args: &syn::AttributeArgs, which: GtestAttributeArgument, ) -> Result<String, TokenStream> { let pos = match which { GtestAttributeArgument::TestSuite => 0, GtestAttributeArgument::TestName => 1, }; match &args[pos] { syn::NestedMeta::Meta(syn::Meta::Path(path)) if path.segments.len() == 1 => { Ok(path.segments[0].ident.to_string()) } _ => { let error_stream = match which { GtestAttributeArgument::TestSuite => { quote_spanned! { args[pos].span() => compile_error!( "Expected a test suite name, written as an identifier." ); } } GtestAttributeArgument::TestName => { quote_spanned! { args[pos].span() => compile_error!( "Expected a test name, written as an identifier." ); } } }; Err(error_stream.into()) } } } let args = syn::parse_macro_input!(arg_stream as syn::AttributeArgs); let input_fn = syn::parse_macro_input!(input as syn::ItemFn); if let Some(asyncness) = input_fn.sig.asyncness { // TODO(crbug.com/1288947): We can support async functions once we have block_on() support // which will run a RunLoop until the async test completes. The run_test_fn just needs to be // generated to `block_on(|| #test_fn)` instead of calling `#test_fn` synchronously. return quote_spanned! 
{ asyncness.span => compile_error!("async functions are not supported."); } .into(); } let (test_suite_name, test_name) = match args.len() { 2 => { let suite = match get_arg_string(&args, GtestAttributeArgument::TestSuite) { Ok(ok) => ok, Err(error_stream) => return error_stream, }; let test = match get_arg_string(&args, GtestAttributeArgument::TestName) { Ok(ok) => ok, Err(error_stream) => return error_stream, }; (suite, test) } 0 | 1 => { return quote! { compile_error!( "Expected two arguments. For example: #[gtest(TestSuite, TestName)]."); } .into(); } x => { return quote_spanned! { args[x.min(2)].span() => compile_error!( "Expected two arguments. For example: #[gtest(TestSuite, TestName)]."); } .into(); } }; // We put the test function and all the code we generate around it into a submodule which is // uniquely named for the super module based on the Gtest suite and test names. A result of this // is that if two tests have the same test suite + name, a compiler error would report the // conflict. let test_mod = format_ident!("__test_{}_{}", test_suite_name, test_name); // The run_test_fn identifier is marked #[no_mangle] to work around a codegen bug where the // function is seen as dead and the compiler omits it from the object files. Since it's // #[no_mangle], the identifier must be globally unique or we have an ODR violation. To produce // a unique identifier, we roll our own name mangling by combining the file name and path from // the source tree root with the Gtest suite and test names and the function itself. // // Note that an adversary could still produce a bug here by placing two equal Gtest suite and // names in a single .rs file but in separate inline submodules. // // TODO(danakj): Build a repro and file upstream bug to refer to. let mangled_function_name = |f: &syn::ItemFn| -> syn::Ident { let file_name = file!().replace(|c: char| !c.is_ascii_alphanumeric(), "_"); format_ident!("{}_{}_{}_{}", file_name, test_suite_name, test_name, f.sig.ident) }; let run_test_fn = format_ident!("run_test_{}", mangled_function_name(&input_fn)); // The identifier of the function which contains the body of the test. let test_fn = &input_fn.sig.ident; // Implements ToTokens to generate a reference to a static-lifetime, null-terminated, C-String // literal. It is represented as an array of type std::os::raw::c_char which can be either // signed or unsigned depending on the platform, and it can be passed directly to C++. This // differs from byte strings and CStr which work with `u8`. // // TODO(crbug.com/1298175): Would it make sense to write a c_str_literal!() macro that takes a // Rust string literal and produces a null-terminated array of `c_char`? Then you could write // `c_str_literal!(file!())` for example, or implement a `file_c_str!()` in this way. Explore // using https://crates.io/crates/cstr. // // TODO(danakj): Write unit tests for this, and consider pulling this out into its own crate, // if we don't replace it with c_str_literal!() or the "cstr" crate. struct CStringLiteral<'a>(&'a str); impl quote::ToTokens for CStringLiteral<'_> { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { let mut c_chars = self.0.chars().map(|c| c as std::os::raw::c_char).collect::<Vec<_>>(); c_chars.push(0); // Verify there's no embedded nulls as that would be invalid if the literal were put in // a std::ffi::CString. assert_eq!(c_chars.iter().filter(|x| **x == 0).count(), 1); let comment = format!("\"{}\" as [c_char]", self.0); tokens.extend(quote! 
{ { #[doc=#comment] &[#(#c_chars as std::os::raw::c_char),*] } }); } } // C-compatible string literals, that can be inserted into the quote! macro. let test_suite_name_c_bytes = CStringLiteral(&test_suite_name);<|fim▁hole|> let file_c_bytes = CStringLiteral(file!()); let output = quote! { mod #test_mod { use super::*; use std::error::Error; use std::fmt::Display; use std::result::Result; #[::rust_gtest_interop::small_ctor::ctor] unsafe fn register_test() { let r = ::rust_gtest_interop::__private::TestRegistration { func: #run_test_fn, test_suite_name: #test_suite_name_c_bytes, test_name: #test_name_c_bytes, file: #file_c_bytes, line: line!(), }; ::rust_gtest_interop::__private::register_test(r); } // The function is extern "C" so `register_test()` can pass this fn as a pointer to C++ // where it's registered with gtest. // // TODO(crbug.com/1296284): Removing #[no_mangle] makes rustc drop the symbol for the // test function in the generated rlib which produces linker errors. If we resolve the // linked bug and emit real object files from rustc for linking, then all the required // symbols are present and `#[no_mangle]` should go away along with the custom-mangling // of `run_test_fn`. We can not use `pub` to resolve this unfortunately. When `#[used]` // is fixed in https://github.com/rust-lang/rust/issues/47384, this may also be // resolved as well. #[no_mangle] extern "C" fn #run_test_fn() { let catch_result = std::panic::catch_unwind(|| #test_fn()); use ::rust_gtest_interop::TestResult; let err_message: Option<String> = match catch_result { Ok(fn_result) => TestResult::into_error_message(fn_result), Err(_) => Some("Test panicked".to_string()), }; if let Some(m) = err_message.as_ref() { ::rust_gtest_interop::__private::add_failure_at(file!(), line!(), &m); } } #input_fn } }; output.into() }<|fim▁end|>
let test_name_c_bytes = CStringLiteral(&test_name);
<|file_name|>mysqlBaseTestCase.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # -*- mode: python; indent-tabs-mode: nil; -*- # vim:expandtab:shiftwidth=2:tabstop=2:smarttab: # # Copyright (C) 2011 Patrick Crews # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import unittest import os import time import difflib import subprocess import MySQLdb servers = None class mysqlBaseTestCase(unittest.TestCase): def setUp(self): """ If we need to do anything pre-test, we do it here. Any code here is executed before any test method we<|fim▁hole|> self.servers = servers return def tearDown(self): #server_manager.reset_servers(test_executor.name) queries = ["DROP SCHEMA IF EXISTS test" ,"CREATE SCHEMA IF NOT EXISTS test" ] for server in self.servers: retcode, result = self.execute_queries(queries, server, schema='mysql') self.assertEqual(retcode,0,result) # Begin our utility code here # This is where we add methods that enable a test to do magic : ) def execute_cmd(self, cmd, stdout_path, exec_path=None, get_output=False): stdout_file = open(stdout_path,'w') cmd_subproc = subprocess.Popen( cmd , shell=True , cwd=exec_path , stdout = stdout_file , stderr = subprocess.STDOUT ) cmd_subproc.wait() retcode = cmd_subproc.returncode stdout_file.close() if get_output: data_file = open(stdout_path,'r') output = ''.join(data_file.readlines()) else: output = None return retcode, output def get_tables(self, server, schema): """ Return a list of the tables in the schema on the server """ results = [] query = "SHOW TABLES IN %s" %(schema) retcode, table_set = self.execute_query(query, server) for table_data in table_set: table_name = table_data[0] results.append(table_name) return results def check_slaves_by_query( self , master_server , other_servers , query , expected_result = None ): """ We execute the query across all servers and return a dict listing any diffs found, None if all is good. 
If a user provides an expected_result, we will skip executing against the master This is done as it is assumed the expected result has been generated / tested against the master """ comp_results = {} if expected_result: pass # don't bother getting it else: # run against master for 'good' value retcode, expected_result = self.execute_query(query, master_server) for server in other_servers: retcode, slave_result = self.execute_query(query, server) #print "%s: expected_result= %s | slave_result= %s" % ( server.name # , expected_result # , slave_result_ # ) if not expected_result == slave_result: comp_data = "%s: expected_result= %s | slave_result= %s" % ( server.name , expected_result , slave_result ) if comp_results.has_key(server.name): comp_results[server.name].append(comp_data) else: comp_results[server.name]=[comp_data] if comp_results: return comp_results return None def check_slaves_by_checksum( self , master_server , other_servers , schemas=['test'] , tables=[] ): """ We compare the specified tables (default = all) from the specified schemas between the 'master' and the other servers provided (via list) via CHECKSUM We return a dictionary listing the server and any tables that differed """ comp_results = {} for server in other_servers: for schema in schemas: for table in self.get_tables(master_server, schema): query = "CHECKSUM TABLE %s.%s" %(schema, table) retcode, master_checksum = self.execute_query(query, master_server) retcode, slave_checksum = self.execute_query(query, server) #print "%s: master_checksum= %s | slave_checksum= %s" % ( table # , master_checksum # , slave_checksum # ) if not master_checksum == slave_checksum: comp_data = "%s: master_checksum= %s | slave_checksum= %s" % ( table , master_checksum , slave_checksum ) if comp_results.has_key(server.name): comp_results[server.name].append(comp_data) else: comp_results[server.name]=[comp_data] if comp_results: return comp_results return None def take_mysqldump( self , server , databases=[] , tables=[] , dump_path = None , cmd_root = None): """ Take a mysqldump snapshot of the given server, storing the output to dump_path """ if not dump_path: dump_path = os.path.join(server.vardir, 'dumpfile.dat') if cmd_root: dump_cmd = cmd_root else: dump_cmd = "%s --no-defaults --user=root --port=%d --host=127.0.0.1 --protocol=tcp --result-file=%s" % ( server.mysqldump , server.master_port , dump_path ) if databases: if len(databases) > 1: # We have a list of db's that are to be dumped so we handle things dump_cmd = ' '.join([dump_cmd, '--databases', ' '.join(databases)]) else: dump_cmd = ' '.join([dump_cmd, databases[0], ' '.join(tables)]) self.execute_cmd(dump_cmd, os.devnull) def diff_dumpfiles(self, orig_file_path, new_file_path): """ diff two dumpfiles useful for comparing servers """ orig_file = open(orig_file_path,'r') restored_file = open(new_file_path,'r') orig_file_data = [] rest_file_data = [] orig_file_data= self.filter_data(orig_file.readlines(),'Dump completed') rest_file_data= self.filter_data(restored_file.readlines(),'Dump completed') server_diff = difflib.unified_diff( orig_file_data , rest_file_data , fromfile=orig_file_path , tofile=new_file_path ) diff_output = [] for line in server_diff: diff_output.append(line) output = '\n'.join(diff_output) orig_file.close() restored_file.close() return (diff_output==[]), output def filter_data(self, input_data, filter_text ): return_data = [] for line in input_data: if filter_text in line.strip(): pass else: return_data.append(line) return return_data def execute_query( self , 
query , server , schema='test'): try: conn = MySQLdb.connect( host = '127.0.0.1' , port = server.master_port , user = 'root' , db = schema) cursor = conn.cursor() cursor.execute(query) result_set = cursor.fetchall() cursor.close() except MySQLdb.Error, e: return 1, ("Error %d: %s" %(e.args[0], e.args[1])) conn.commit() conn.close() return 0, result_set def execute_queries( self , query_list , server , schema= 'test'): """ Execute a set of queries as a single transaction """ results = {} retcode = 0 try: conn = MySQLdb.connect( host = '127.0.0.1' , port = server.master_port , user = 'root' , db = schema) cursor = conn.cursor() for idx, query in enumerate(query_list): try: cursor.execute(query) result_set = cursor.fetchall() except MySQLdb.Error, e: result_set = "Error %d: %s" %(e.args[0], e.args[1]) retcode = 1 finally: results[query+str(idx)] = result_set conn.commit() cursor.close() conn.close() except Exception, e: retcode = 1 results = (Exception, e) finally: return retcode, results def execute_randgen(self, test_cmd, test_executor, servers, schema='test'): randgen_outfile = os.path.join(test_executor.logdir,'randgen.out') randgen_output = open(randgen_outfile,'w') server_type = test_executor.master_server.type if server_type in ['percona','galera']: # it is mysql for dbd::perl purposes server_type = 'mysql' dsn = "--dsn=dbi:%s:host=127.0.0.1:port=%d:user=root:password="":database=%s" %( server_type , servers[0].master_port , schema) randgen_cmd = " ".join([test_cmd, dsn]) randgen_subproc = subprocess.Popen( randgen_cmd , shell=True , cwd=test_executor.system_manager.randgen_path , env=test_executor.working_environment , stdout = randgen_output , stderr = subprocess.STDOUT ) randgen_subproc.wait() retcode = randgen_subproc.returncode randgen_output.close() randgen_file = open(randgen_outfile,'r') output = ''.join(randgen_file.readlines()) randgen_file.close() if retcode == 0: if not test_executor.verbose: output = None return retcode, output<|fim▁end|>
may execute """
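A small, runnable illustration of the difflib.unified_diff call that diff_dumpfiles() relies on in the example above: an empty diff is how that helper concludes that two mysqldump files match. The file names and SQL lines below are invented for the example and are not part of the dataset row.

import difflib

orig_dump = ["CREATE TABLE t1 (a INT);\n"]
restored_dump = ["CREATE TABLE t1 (a INT);\n", "INSERT INTO t1 VALUES (1);\n"]

diff = list(difflib.unified_diff(orig_dump, restored_dump,
                                 fromfile="master_dump.dat",
                                 tofile="restored_dump.dat"))
# diff == [] means the dumps are identical, which is exactly the boolean
# diff_dumpfiles() returns as the first element of its result tuple.
print("dumps match" if diff == [] else "".join(diff))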
<|file_name|>harddrive_enumerator.py<|end_file_name|><|fim▁begin|>import win32api import os import sys import subprocess import logging from itertools import izip_longest #itertools recipe def grouper(n, iterable, fillvalue=None): "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx" args = [iter(iterable)] * n return izip_longest(fillvalue=fillvalue, *args) def harddrive_enumerator(): """ Generator to get all (fixed) drive letters in the computers Returns tuples of (DriveName, VolumeName) - eg. ("D:", "Samsung Station") """ logger = logging.getLogger("keepitup") drivesDetailedList = [] if sys.platform == "win32": logger.debug("Enumerating win32 hard drives") getDrivesProc = subprocess.Popen('wmic logicaldisk where drivetype=3 get name, VolumeName /format:list', shell=True, stdout=subprocess.PIPE) output, err = getDrivesProc.communicate() logger.debug("Enumerated hard drives output: %s", output) drivesDetailedList = output.split(os.linesep) elif sys.platform in ["linux2", "darwin"]: logger.debug("Enumerating linux/osx hard drives") raise NotImplementedError() else: logger.error("Cannot enumeratre hard drives - unrecognized OS: %s", sys.platform)<|fim▁hole|> volumeName = volumeName[len("VolumeName="):].strip() yield name, volumeName<|fim▁end|>
raise NotImplementedError() for name, volumeName in grouper(2, drivesDetailedList): if "Name=" in name and "VolumeName" in volumeName: name = name[len("Name="):].strip()
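A minimal, runnable sketch of the grouper() recipe used by the harddrive_enumerator example above. It assumes Python 3 (zip_longest rather than the izip_longest import in the original Python 2 file), and the wmic-style lines are made up for illustration; nothing here is part of the dataset row itself.

from itertools import zip_longest   # the original file imports izip_longest (Python 2)

def grouper(n, iterable, fillvalue=None):
    "grouper(2, 'ABCD') --> ('A', 'B') ('C', 'D')"
    args = [iter(iterable)] * n
    return zip_longest(*args, fillvalue=fillvalue)

# wmic prints alternating Name=/VolumeName= lines; pairing them up is what
# lets the generator yield (drive letter, volume name) tuples.
fake_wmic_lines = ["Name=C:", "VolumeName=System", "Name=D:", "VolumeName=Data"]
for name, volume in grouper(2, fake_wmic_lines):
    print(name[len("Name="):], volume[len("VolumeName="):])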
<|file_name|>os_c.py<|end_file_name|><|fim▁begin|>import platform class OSCollector(object): def __init__(self, docker_client=None):<|fim▁hole|> def _zip_fields_values(self, keys, values): data = {} for key, value in zip(keys, values): if len(value) > 0: data[key] = value else: data[key] = None return data def _get_docker_version(self): data = {} if platform.system() == 'Linux': version = "Unknown" if self.docker_client: ver_resp = self.docker_client.version() version = "Docker version {0}, build {1}".format( ver_resp.get("Version", "Unknown"), ver_resp.get("GitCommit", "Unknown")) data['dockerVersion'] = version return data def _get_os(self): data = {} if platform.system() == 'Linux': info = platform.linux_distribution() keys = ["distribution", "version", "versionDescription"] data = self._zip_fields_values(keys, info) data['kernelVersion'] = \ platform.release() if len(platform.release()) > 0 else None return data def get_data(self): data = self._get_os() data.update(self._get_docker_version()) return data<|fim▁end|>
self.docker_client = docker_client def key_name(self): return "osInfo"
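A short, hypothetical usage sketch for the collector defined above. It assumes the file is importable as a module named os_c (taken from the file name) and that no Docker client is supplied, in which case dockerVersion simply reports "Unknown" on Linux; nothing here is part of the dataset row itself.

from os_c import OSCollector   # assumed module name, based on the file name above

collector = OSCollector(docker_client=None)
print(collector.key_name())    # -> "osInfo"
# On Linux this returns distribution/version/versionDescription/kernelVersion
# plus dockerVersion; on other platforms the returned dict is empty.
print(collector.get_data())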
<|file_name|>0002_populate_users.py<|end_file_name|><|fim▁begin|># encoding: utf-8 import datetime from south.db import db from south.v2 import DataMigration from django.db import models class Migration(DataMigration): def forwards(self, orm): for user in orm['auth.user'].objects.all(): notification = orm.Notification() notification.user = user notification.save() def backwards(self, orm): orm['notification.notification'].objects.all().delete() models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'},<|fim▁hole|> 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'notification.notification': { 'Meta': {'object_name': 'Notification'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'notify_bug_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'notify_invoices_to_send': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'notify_late_invoices': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}) } } complete_apps = ['notification']<|fim▁end|>
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
<|file_name|>test_streams.py<|end_file_name|><|fim▁begin|>"""Tests for streams.py""" import asyncio import unittest from unittest import mock from aiohttp import streams from aiohttp import test_utils class TestStreamReader(unittest.TestCase): DATA = b'line1\nline2\nline3\n' def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def _make_one(self, *args, **kwargs): return streams.StreamReader(loop=self.loop, *args, **kwargs) def test_create_waiter(self): stream = self._make_one() stream._waiter = asyncio.Future(loop=self.loop) self.assertRaises(RuntimeError, stream._create_waiter, 'test') @mock.patch('aiohttp.streams.asyncio') def test_ctor_global_loop(self, m_asyncio): stream = streams.StreamReader() self.assertIs(stream._loop, m_asyncio.get_event_loop.return_value) def test_at_eof(self): stream = self._make_one() self.assertFalse(stream.at_eof()) stream.feed_data(b'some data\n') self.assertFalse(stream.at_eof()) self.loop.run_until_complete(stream.readline()) self.assertFalse(stream.at_eof()) stream.feed_data(b'some data\n') stream.feed_eof() self.loop.run_until_complete(stream.readline()) self.assertTrue(stream.at_eof()) def test_wait_eof(self): stream = self._make_one() wait_task = asyncio.Task(stream.wait_eof(), loop=self.loop) def cb(): yield from asyncio.sleep(0.1, loop=self.loop) stream.feed_eof() asyncio.Task(cb(), loop=self.loop) self.loop.run_until_complete(wait_task) self.assertTrue(stream.is_eof()) self.assertIsNone(stream._eof_waiter) def test_wait_eof_eof(self): stream = self._make_one() stream.feed_eof() wait_task = asyncio.Task(stream.wait_eof(), loop=self.loop) self.loop.run_until_complete(wait_task) self.assertTrue(stream.is_eof()) def test_feed_empty_data(self): stream = self._make_one() stream.feed_data(b'') stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_feed_nonempty_data(self): stream = self._make_one() stream.feed_data(self.DATA) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) def test_read_zero(self): # Read zero bytes. stream = self._make_one() stream.feed_data(self.DATA) data = self.loop.run_until_complete(stream.read(0)) self.assertEqual(b'', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) def test_read(self): # Read bytes. stream = self._make_one() read_task = asyncio.Task(stream.read(30), loop=self.loop) def cb(): stream.feed_data(self.DATA) self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertEqual(self.DATA, data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_read_line_breaks(self): # Read bytes without line breaks. stream = self._make_one() stream.feed_data(b'line1') stream.feed_data(b'line2') data = self.loop.run_until_complete(stream.read(5)) self.assertEqual(b'line1', data) data = self.loop.run_until_complete(stream.read(5)) self.assertEqual(b'line2', data) def test_read_eof(self): # Read bytes, stop at eof. stream = self._make_one() read_task = asyncio.Task(stream.read(1024), loop=self.loop) def cb(): stream.feed_eof() self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertEqual(b'', data) data = self.loop.run_until_complete(stream.read()) self.assertIs(data, streams.EOF_MARKER) @mock.patch('aiohttp.streams.internal_logger') def test_read_eof_infinit(self, internal_logger): # Read bytes. 
stream = self._make_one() stream.feed_eof() self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.loop.run_until_complete(stream.read()) self.assertTrue(internal_logger.warning.called) def test_read_until_eof(self): # Read all bytes until eof. stream = self._make_one() read_task = asyncio.Task(stream.read(-1), loop=self.loop) def cb(): stream.feed_data(b'chunk1\n') stream.feed_data(b'chunk2') stream.feed_eof() self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertEqual(b'chunk1\nchunk2', data) data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_read_exception(self): stream = self._make_one() stream.feed_data(b'line\n') data = self.loop.run_until_complete(stream.read(2)) self.assertEqual(b'li', data) stream.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, stream.read(2)) def test_readline(self): # Read one line. 'readline' will need to wait for the data # to come from 'cb' stream = self._make_one() stream.feed_data(b'chunk1 ') read_task = asyncio.Task(stream.readline(), loop=self.loop) def cb(): stream.feed_data(b'chunk2 ') stream.feed_data(b'chunk3 ') stream.feed_data(b'\n chunk4') self.loop.call_soon(cb) line = self.loop.run_until_complete(read_task) self.assertEqual(b'chunk1 chunk2 chunk3 \n', line) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b' chunk4', data) def test_readline_limit_with_existing_data(self): # Read one line. The data is in StreamReader's buffer # before the event loop is run. stream = self._make_one(limit=3) stream.feed_data(b'li') stream.feed_data(b'ne1\nline2\n') self.assertRaises( ValueError, self.loop.run_until_complete, stream.readline()) # The buffer should contain the remaining data after exception stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'line2\n', data) def test_readline_limit(self): # Read one line. StreamReaders are fed with data after # their 'readline' methods are called. stream = self._make_one(limit=7) def cb(): stream.feed_data(b'chunk1') stream.feed_data(b'chunk2') stream.feed_data(b'chunk3\n') stream.feed_eof() self.loop.call_soon(cb) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readline()) stream = self._make_one(limit=7) def cb(): stream.feed_data(b'chunk1') stream.feed_data(b'chunk2\n') stream.feed_data(b'chunk3\n') stream.feed_eof() self.loop.call_soon(cb) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readline()) data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'chunk3\n', data) def test_readline_nolimit_nowait(self): # All needed data for the first 'readline' call will be # in the buffer. 
stream = self._make_one() stream.feed_data(self.DATA[:6]) stream.feed_data(self.DATA[6:]) line = self.loop.run_until_complete(stream.readline()) self.assertEqual(b'line1\n', line) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'line2\nline3\n', data) def test_readline_eof(self): stream = self._make_one() stream.feed_data(b'some data') stream.feed_eof() line = self.loop.run_until_complete(stream.readline()) self.assertEqual(b'some data', line) def test_readline_empty_eof(self): stream = self._make_one() stream.feed_eof() line = self.loop.run_until_complete(stream.readline()) self.assertEqual(b'', line) self.assertIs(line, streams.EOF_MARKER) def test_readline_read_byte_count(self): stream = self._make_one() stream.feed_data(self.DATA) self.loop.run_until_complete(stream.readline()) data = self.loop.run_until_complete(stream.read(7)) self.assertEqual(b'line2\nl', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'ine3\n', data) def test_readline_exception(self): stream = self._make_one() stream.feed_data(b'line\n') data = self.loop.run_until_complete(stream.readline()) self.assertEqual(b'line\n', data) stream.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readline()) def test_readexactly_zero_or_less(self): # Read exact number of bytes (zero or less). stream = self._make_one() stream.feed_data(self.DATA) data = self.loop.run_until_complete(stream.readexactly(0)) self.assertEqual(b'', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) stream = self._make_one() stream.feed_data(self.DATA) data = self.loop.run_until_complete(stream.readexactly(-1)) self.assertEqual(b'', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) def test_readexactly(self): # Read exact number of bytes. stream = self._make_one() n = 2 * len(self.DATA) read_task = asyncio.Task(stream.readexactly(n), loop=self.loop) def cb(): stream.feed_data(self.DATA) stream.feed_data(self.DATA) stream.feed_data(self.DATA) self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertEqual(self.DATA + self.DATA, data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(self.DATA, data) def test_readexactly_eof(self): # Read exact number of bytes (eof). 
stream = self._make_one() n = 2 * len(self.DATA) read_task = asyncio.Task(stream.readexactly(n), loop=self.loop) def cb(): stream.feed_data(self.DATA) stream.feed_eof() self.loop.call_soon(cb) <|fim▁hole|> self.assertEqual(str(cm.exception), '18 bytes read on a total of 36 expected bytes') data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_readexactly_exception(self): stream = self._make_one() stream.feed_data(b'line\n') data = self.loop.run_until_complete(stream.readexactly(2)) self.assertEqual(b'li', data) stream.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readexactly(2)) def test_exception(self): stream = self._make_one() self.assertIsNone(stream.exception()) exc = ValueError() stream.set_exception(exc) self.assertIs(stream.exception(), exc) def test_exception_waiter(self): stream = self._make_one() @asyncio.coroutine def set_err(): stream.set_exception(ValueError()) t1 = asyncio.Task(stream.readline(), loop=self.loop) t2 = asyncio.Task(set_err(), loop=self.loop) self.loop.run_until_complete(asyncio.wait([t1, t2], loop=self.loop)) self.assertRaises(ValueError, t1.result) def test_exception_cancel(self): stream = self._make_one() @asyncio.coroutine def read_a_line(): yield from stream.readline() t = asyncio.Task(read_a_line(), loop=self.loop) test_utils.run_briefly(self.loop) t.cancel() test_utils.run_briefly(self.loop) # The following line fails if set_exception() isn't careful. stream.set_exception(RuntimeError('message')) test_utils.run_briefly(self.loop) self.assertIs(stream._waiter, None) def test_readany_eof(self): stream = self._make_one() read_task = asyncio.Task(stream.readany(), loop=self.loop) self.loop.call_soon(stream.feed_data, b'chunk1\n') data = self.loop.run_until_complete(read_task) self.assertEqual(b'chunk1\n', data) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_readany_empty_eof(self): stream = self._make_one() stream.feed_eof() read_task = asyncio.Task(stream.readany(), loop=self.loop) data = self.loop.run_until_complete(read_task) self.assertEqual(b'', data) self.assertIs(data, streams.EOF_MARKER) def test_readany_exception(self): stream = self._make_one() stream.feed_data(b'line\n') data = self.loop.run_until_complete(stream.readany()) self.assertEqual(b'line\n', data) stream.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, stream.readany()) def test_read_nowait(self): stream = self._make_one() stream.feed_data(b'line1\nline2\n') self.assertEqual( stream.read_nowait(), b'line1\nline2\n') self.assertIs( stream.read_nowait(), streams.EOF_MARKER) stream.feed_eof() data = self.loop.run_until_complete(stream.read()) self.assertEqual(b'', data) def test_read_nowait_exception(self): stream = self._make_one() stream.feed_data(b'line\n') stream.set_exception(ValueError()) self.assertRaises(ValueError, stream.read_nowait) def test_read_nowait_waiter(self): stream = self._make_one() stream.feed_data(b'line\n') stream._waiter = stream._create_waiter('readany') self.assertRaises(RuntimeError, stream.read_nowait) class TestEmptyStreamReader(unittest.TestCase): def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) def tearDown(self): self.loop.close() def test_empty_stream_reader(self): s = streams.EmptyStreamReader() self.assertIsNone(s.set_exception(ValueError())) self.assertIsNone(s.exception()) self.assertIsNone(s.feed_eof()) 
self.assertIsNone(s.feed_data(b'data')) self.assertTrue(s.at_eof()) self.assertIsNone( self.loop.run_until_complete(s.wait_eof())) self.assertIs( self.loop.run_until_complete(s.read()), streams.EOF_MARKER) self.assertIs( self.loop.run_until_complete(s.readline()), streams.EOF_MARKER) self.assertIs( self.loop.run_until_complete(s.readany()), streams.EOF_MARKER) self.assertRaises( asyncio.IncompleteReadError, self.loop.run_until_complete, s.readexactly(10)) self.assertIs(s.read_nowait(), streams.EOF_MARKER) class DataQueueMixin: def test_is_eof(self): self.assertFalse(self.buffer.is_eof()) self.buffer.feed_eof() self.assertTrue(self.buffer.is_eof()) def test_at_eof(self): self.assertFalse(self.buffer.at_eof()) self.buffer.feed_eof() self.assertTrue(self.buffer.at_eof()) self.buffer._buffer.append(object()) self.assertFalse(self.buffer.at_eof()) def test_feed_data(self): item = object() self.buffer.feed_data(item, 1) self.assertEqual([(item, 1)], list(self.buffer._buffer)) def test_feed_eof(self): self.buffer.feed_eof() self.assertTrue(self.buffer._eof) def test_read(self): item = object() read_task = asyncio.Task(self.buffer.read(), loop=self.loop) def cb(): self.buffer.feed_data(item, 1) self.loop.call_soon(cb) data = self.loop.run_until_complete(read_task) self.assertIs(item, data) def test_read_eof(self): read_task = asyncio.Task(self.buffer.read(), loop=self.loop) def cb(): self.buffer.feed_eof() self.loop.call_soon(cb) self.assertRaises( streams.EofStream, self.loop.run_until_complete, read_task) def test_read_cancelled(self): read_task = asyncio.Task(self.buffer.read(), loop=self.loop) test_utils.run_briefly(self.loop) waiter = self.buffer._waiter self.assertIsInstance(waiter, asyncio.Future) read_task.cancel() self.assertRaises( asyncio.CancelledError, self.loop.run_until_complete, read_task) self.assertTrue(waiter.cancelled()) self.assertIsNone(self.buffer._waiter) self.buffer.feed_data(b'test', 4) self.assertIsNone(self.buffer._waiter) def test_read_until_eof(self): item = object() self.buffer.feed_data(item, 1) self.buffer.feed_eof() data = self.loop.run_until_complete(self.buffer.read()) self.assertIs(data, item) self.assertRaises( streams.EofStream, self.loop.run_until_complete, self.buffer.read()) def test_read_exception(self): self.buffer.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, self.buffer.read()) def test_read_exception_with_data(self): val = object() self.buffer.feed_data(val, 1) self.buffer.set_exception(ValueError()) self.assertIs(val, self.loop.run_until_complete(self.buffer.read())) self.assertRaises( ValueError, self.loop.run_until_complete, self.buffer.read()) def test_read_exception_on_wait(self): read_task = asyncio.Task(self.buffer.read(), loop=self.loop) test_utils.run_briefly(self.loop) self.assertIsInstance(self.buffer._waiter, asyncio.Future) self.buffer.feed_eof() self.buffer.set_exception(ValueError()) self.assertRaises( ValueError, self.loop.run_until_complete, read_task) def test_exception(self): self.assertIsNone(self.buffer.exception()) exc = ValueError() self.buffer.set_exception(exc) self.assertIs(self.buffer.exception(), exc) def test_exception_waiter(self): @asyncio.coroutine def set_err(): self.buffer.set_exception(ValueError()) t1 = asyncio.Task(self.buffer.read(), loop=self.loop) t2 = asyncio.Task(set_err(), loop=self.loop) self.loop.run_until_complete(asyncio.wait([t1, t2], loop=self.loop)) self.assertRaises(ValueError, t1.result) class TestDataQueue(unittest.TestCase, DataQueueMixin): def 
setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) self.buffer = streams.DataQueue(loop=self.loop) def tearDown(self): self.loop.close() class TestChunksQueue(unittest.TestCase, DataQueueMixin): def setUp(self): self.loop = asyncio.new_event_loop() asyncio.set_event_loop(None) self.buffer = streams.ChunksQueue(loop=self.loop) def tearDown(self): self.loop.close() def test_read_eof(self): read_task = asyncio.Task(self.buffer.read(), loop=self.loop) def cb(): self.buffer.feed_eof() self.loop.call_soon(cb) self.loop.run_until_complete(read_task) self.assertTrue(self.buffer.at_eof()) def test_read_until_eof(self): item = object() self.buffer.feed_data(item, 1) self.buffer.feed_eof() data = self.loop.run_until_complete(self.buffer.read()) self.assertIs(data, item) thing = self.loop.run_until_complete(self.buffer.read()) self.assertEqual(thing, b'') self.assertTrue(self.buffer.at_eof()) def test_readany(self): self.assertIs(self.buffer.read.__func__, self.buffer.readany.__func__)<|fim▁end|>
with self.assertRaises(asyncio.IncompleteReadError) as cm: self.loop.run_until_complete(read_task) self.assertEqual(cm.exception.partial, self.DATA) self.assertEqual(cm.exception.expected, n)
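A standalone, runnable sketch (standard library only, not aiohttp) of the asyncio.IncompleteReadError behaviour that the readexactly tests above assert: when EOF arrives before enough bytes have been fed, the exception carries the partial bytes and the expected count.

import asyncio

async def demo():
    reader = asyncio.StreamReader()
    reader.feed_data(b"abc")
    reader.feed_eof()
    try:
        await reader.readexactly(10)
    except asyncio.IncompleteReadError as exc:
        # 3 bytes read on a total of 10 expected bytes
        print(exc.partial, exc.expected)

asyncio.run(demo())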
<|file_name|>delete-version.rs<|end_file_name|><|fim▁begin|>// Purge all references to a crate's version from the database. // // Please be super sure you want to do this before running this. // // Usage: // cargo run --bin delete-version crate-name version-number #![deny(warnings)] extern crate cargo_registry; extern crate postgres; extern crate time; extern crate semver; use std::env; use std::io; use std::io::prelude::*; use cargo_registry::{Crate, Version}; fn main() { let conn = postgres::Connection::connect(&env("DATABASE_URL")[..], &postgres::SslMode::None).unwrap(); { let tx = conn.transaction().unwrap(); delete(&tx); tx.set_commit(); tx.finish().unwrap(); } } fn env(s: &str) -> String { match env::var(s).ok() { Some(s) => s, None => panic!("must have `{}` defined", s), } }<|fim▁hole|>fn delete(tx: &postgres::Transaction) { let name = match env::args().nth(1) { None => { println!("needs a crate-name argument"); return } Some(s) => s, }; let version = match env::args().nth(2) { None => { println!("needs a version argument"); return } Some(s) => s, }; let version = semver::Version::parse(&version).unwrap(); let krate = Crate::find_by_name(tx, &name).unwrap(); let v = Version::find_by_num(tx, krate.id, &version).unwrap().unwrap(); print!("Are you sure you want to delete {}#{} ({}) [y/N]: ", name, version, v.id); io::stdout().flush().unwrap(); let mut line = String::new(); io::stdin().read_line(&mut line).unwrap(); if !line.starts_with("y") { return } println!("deleting version {} ({})", v.num, v.id); let n = tx.execute("DELETE FROM version_downloads WHERE version_id = $1", &[&v.id]).unwrap(); println!(" {} download records deleted", n); let n = tx.execute("DELETE FROM version_authors WHERE version_id = $1", &[&v.id]).unwrap(); println!(" {} author records deleted", n); let n = tx.execute("DELETE FROM dependencies WHERE version_id = $1", &[&v.id]).unwrap(); println!(" {} dependencies deleted", n); tx.execute("DELETE FROM versions WHERE id = $1", &[&v.id]).unwrap(); print!("commit? [y/N]: "); io::stdout().flush().unwrap(); let mut line = String::new(); io::stdin().read_line(&mut line).unwrap(); if !line.starts_with("y") { panic!("aborting transaction"); } }<|fim▁end|>
<|file_name|>courses.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from optparse import make_option from django.core.management.base import BaseCommand from messytables import XLSTableSet, headers_guess, headers_processor, offset_processor from data.models import Source, Course, MerlotCategory class Command(BaseCommand): help = "Utilities to merge our database with MERLOT" args = "--file" option_list = BaseCommand.option_list + ( make_option("--file", action="store", dest="filename", help="Source filename"), make_option("--source", action="store", dest="source_id", help="Source ID"), make_option("--provider", action="store", dest="provider_tag", help="Provider Tag"), ) def handle(self, *args, **options): if options.get('filename'): self.ku_openlearning(options.get('filename'), options.get('source_id')) def ku_openlearning(self, filename, source_id): CATEGORY_MAPPING = { 'Assessment of learning': 2298, #Assessment, 'Finance': 2235, 'Public Service': 'Criminal Justice', 'Health Science': 'Health Sciences', 'Management': 2248, 'Online Instruction': 'Hybrid and Online Course Development', 'Early Childhood': ['Career Counseling and Services', 'Childhood and Adolescence'], 'Law, Legal': 'Law', 'Psychology': 'Psychology', 'Customer Service': 2246, 'Communications': 'Communications', 'Professionalism': 'Personal Development' } source = Source.objects.get(pk=source_id) fh = open(filename, 'rb') table_set = XLSTableSet(fh) row_set = table_set.tables[0] offset, headers = headers_guess(row_set.sample) row_set.register_processor(headers_processor(headers)) row_set.register_processor(offset_processor(offset + 1)) for row in row_set: url = row[0].value title = row[1].value description = row[2].value # language = row[4].value # material_type = row[5].value license = row[6].value categories = row[7].value keywords = row[8].value # audience = row[9].value course, is_created = Course.objects.get_or_create( linkurl = url, provider = source.provider, source = source, defaults = { 'title': title, 'description': description, 'tags': keywords,<|fim▁hole|> 'creative_commons': 'Yes', 'creative_commons_commercial': 'No', 'creative_commons_derivatives': 'No' } ) merlot_cat = CATEGORY_MAPPING[categories] if type(merlot_cat) != list: merlot_cat = [merlot_cat,] for item in merlot_cat: try: m = MerlotCategory.objects.get(merlot_id=item) course.merlot_categories.add(m) except ValueError: m = MerlotCategory.objects.get(name=item) course.merlot_categories.add(m)<|fim▁end|>
'language': 'English', 'license': license, 'content_medium': 'text',
<|file_name|>api.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT. package mturk import ( "fmt" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awsutil" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/private/protocol" "github.com/aws/aws-sdk-go/private/protocol/jsonrpc" ) const opAcceptQualificationRequest = "AcceptQualificationRequest" // AcceptQualificationRequestRequest generates a "aws/request.Request" representing the // client's request for the AcceptQualificationRequest operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See AcceptQualificationRequest for more information on using the AcceptQualificationRequest // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the AcceptQualificationRequestRequest method. // req, resp := client.AcceptQualificationRequestRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/AcceptQualificationRequest func (c *MTurk) AcceptQualificationRequestRequest(input *AcceptQualificationRequestInput) (req *request.Request, output *AcceptQualificationRequestOutput) { op := &request.Operation{ Name: opAcceptQualificationRequest, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &AcceptQualificationRequestInput{} } output = &AcceptQualificationRequestOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // AcceptQualificationRequest API operation for Amazon Mechanical Turk. // // The AcceptQualificationRequest operation approves a Worker's request for // a Qualification. // // Only the owner of the Qualification type can grant a Qualification request // for that type. // // A successful request for the AcceptQualificationRequest operation returns // with no errors and an empty body. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation AcceptQualificationRequest for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/AcceptQualificationRequest func (c *MTurk) AcceptQualificationRequest(input *AcceptQualificationRequestInput) (*AcceptQualificationRequestOutput, error) { req, out := c.AcceptQualificationRequestRequest(input) return out, req.Send() } // AcceptQualificationRequestWithContext is the same as AcceptQualificationRequest with the addition of // the ability to pass a context and additional request options. // // See AcceptQualificationRequest for details on how to use this API operation. 
// // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) AcceptQualificationRequestWithContext(ctx aws.Context, input *AcceptQualificationRequestInput, opts ...request.Option) (*AcceptQualificationRequestOutput, error) { req, out := c.AcceptQualificationRequestRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opApproveAssignment = "ApproveAssignment" // ApproveAssignmentRequest generates a "aws/request.Request" representing the // client's request for the ApproveAssignment operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ApproveAssignment for more information on using the ApproveAssignment // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ApproveAssignmentRequest method. // req, resp := client.ApproveAssignmentRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ApproveAssignment func (c *MTurk) ApproveAssignmentRequest(input *ApproveAssignmentInput) (req *request.Request, output *ApproveAssignmentOutput) { op := &request.Operation{ Name: opApproveAssignment, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &ApproveAssignmentInput{} } output = &ApproveAssignmentOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // ApproveAssignment API operation for Amazon Mechanical Turk. // // The ApproveAssignment operation approves the results of a completed assignment. // // Approving an assignment initiates two payments from the Requester's Amazon.com // account // // * The Worker who submitted the results is paid the reward specified in // the HIT. // // * Amazon Mechanical Turk fees are debited. // // If the Requester's account does not have adequate funds for these payments, // the call to ApproveAssignment returns an exception, and the approval is not // processed. You can include an optional feedback message with the approval, // which the Worker can see in the Status section of the web site. // // You can also call this operation for assignments that were previous rejected // and approve them by explicitly overriding the previous rejection. This only // works on rejected assignments that were submitted within the previous 30 // days and only if the assignment's related HIT has not been deleted. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ApproveAssignment for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. 
Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ApproveAssignment func (c *MTurk) ApproveAssignment(input *ApproveAssignmentInput) (*ApproveAssignmentOutput, error) { req, out := c.ApproveAssignmentRequest(input) return out, req.Send() } // ApproveAssignmentWithContext is the same as ApproveAssignment with the addition of // the ability to pass a context and additional request options. // // See ApproveAssignment for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ApproveAssignmentWithContext(ctx aws.Context, input *ApproveAssignmentInput, opts ...request.Option) (*ApproveAssignmentOutput, error) { req, out := c.ApproveAssignmentRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opAssociateQualificationWithWorker = "AssociateQualificationWithWorker" // AssociateQualificationWithWorkerRequest generates a "aws/request.Request" representing the // client's request for the AssociateQualificationWithWorker operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See AssociateQualificationWithWorker for more information on using the AssociateQualificationWithWorker // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the AssociateQualificationWithWorkerRequest method. // req, resp := client.AssociateQualificationWithWorkerRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/AssociateQualificationWithWorker func (c *MTurk) AssociateQualificationWithWorkerRequest(input *AssociateQualificationWithWorkerInput) (req *request.Request, output *AssociateQualificationWithWorkerOutput) { op := &request.Operation{ Name: opAssociateQualificationWithWorker, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &AssociateQualificationWithWorkerInput{} } output = &AssociateQualificationWithWorkerOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // AssociateQualificationWithWorker API operation for Amazon Mechanical Turk. // // The AssociateQualificationWithWorker operation gives a Worker a Qualification. // AssociateQualificationWithWorker does not require that the Worker submit // a Qualification request. It gives the Qualification directly to the Worker. // // You can only assign a Qualification of a Qualification type that you created // (using the CreateQualificationType operation). // // Note: AssociateQualificationWithWorker does not affect any pending Qualification // requests for the Qualification by the Worker. 
If you assign a Qualification // to a Worker, then later grant a Qualification request made by the Worker, // the granting of the request may modify the Qualification score. To resolve // a pending Qualification request without affecting the Qualification the Worker // already has, reject the request with the RejectQualificationRequest operation. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation AssociateQualificationWithWorker for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/AssociateQualificationWithWorker func (c *MTurk) AssociateQualificationWithWorker(input *AssociateQualificationWithWorkerInput) (*AssociateQualificationWithWorkerOutput, error) { req, out := c.AssociateQualificationWithWorkerRequest(input) return out, req.Send() } // AssociateQualificationWithWorkerWithContext is the same as AssociateQualificationWithWorker with the addition of // the ability to pass a context and additional request options. // // See AssociateQualificationWithWorker for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) AssociateQualificationWithWorkerWithContext(ctx aws.Context, input *AssociateQualificationWithWorkerInput, opts ...request.Option) (*AssociateQualificationWithWorkerOutput, error) { req, out := c.AssociateQualificationWithWorkerRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opCreateAdditionalAssignmentsForHIT = "CreateAdditionalAssignmentsForHIT" // CreateAdditionalAssignmentsForHITRequest generates a "aws/request.Request" representing the // client's request for the CreateAdditionalAssignmentsForHIT operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See CreateAdditionalAssignmentsForHIT for more information on using the CreateAdditionalAssignmentsForHIT // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the CreateAdditionalAssignmentsForHITRequest method. 
// req, resp := client.CreateAdditionalAssignmentsForHITRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateAdditionalAssignmentsForHIT func (c *MTurk) CreateAdditionalAssignmentsForHITRequest(input *CreateAdditionalAssignmentsForHITInput) (req *request.Request, output *CreateAdditionalAssignmentsForHITOutput) { op := &request.Operation{ Name: opCreateAdditionalAssignmentsForHIT, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &CreateAdditionalAssignmentsForHITInput{} } output = &CreateAdditionalAssignmentsForHITOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // CreateAdditionalAssignmentsForHIT API operation for Amazon Mechanical Turk. // // The CreateAdditionalAssignmentsForHIT operation increases the maximum number // of assignments of an existing HIT. // // To extend the maximum number of assignments, specify the number of additional // assignments. // // * HITs created with fewer than 10 assignments cannot be extended to have // 10 or more assignments. Attempting to add assignments in a way that brings // the total number of assignments for a HIT from fewer than 10 assignments // to 10 or more assignments will result in an AWS.MechanicalTurk.InvalidMaximumAssignmentsIncrease // exception. // // * HITs that were created before July 22, 2015 cannot be extended. Attempting // to extend HITs that were created before July 22, 2015 will result in an // AWS.MechanicalTurk.HITTooOldForExtension exception. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation CreateAdditionalAssignmentsForHIT for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateAdditionalAssignmentsForHIT func (c *MTurk) CreateAdditionalAssignmentsForHIT(input *CreateAdditionalAssignmentsForHITInput) (*CreateAdditionalAssignmentsForHITOutput, error) { req, out := c.CreateAdditionalAssignmentsForHITRequest(input) return out, req.Send() } // CreateAdditionalAssignmentsForHITWithContext is the same as CreateAdditionalAssignmentsForHIT with the addition of // the ability to pass a context and additional request options. // // See CreateAdditionalAssignmentsForHIT for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) CreateAdditionalAssignmentsForHITWithContext(ctx aws.Context, input *CreateAdditionalAssignmentsForHITInput, opts ...request.Option) (*CreateAdditionalAssignmentsForHITOutput, error) { req, out := c.CreateAdditionalAssignmentsForHITRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) 
return out, req.Send() } const opCreateHIT = "CreateHIT" // CreateHITRequest generates a "aws/request.Request" representing the // client's request for the CreateHIT operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See CreateHIT for more information on using the CreateHIT // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the CreateHITRequest method. // req, resp := client.CreateHITRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateHIT func (c *MTurk) CreateHITRequest(input *CreateHITInput) (req *request.Request, output *CreateHITOutput) { op := &request.Operation{ Name: opCreateHIT, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &CreateHITInput{} } output = &CreateHITOutput{} req = c.newRequest(op, input, output) return } // CreateHIT API operation for Amazon Mechanical Turk. // // The CreateHIT operation creates a new Human Intelligence Task (HIT). The // new HIT is made available for Workers to find and accept on the Amazon Mechanical // Turk website. // // This operation allows you to specify a new HIT by passing in values for the // properties of the HIT, such as its title, reward amount and number of assignments. // When you pass these values to CreateHIT, a new HIT is created for you, with // a new HITTypeID. The HITTypeID can be used to create additional HITs in the // future without needing to specify common parameters such as the title, description // and reward amount each time. // // An alternative way to create HITs is to first generate a HITTypeID using // the CreateHITType operation and then call the CreateHITWithHITType operation. // This is the recommended best practice for Requesters who are creating large // numbers of HITs. // // CreateHIT also supports several ways to provide question data: by providing // a value for the Question parameter that fully specifies the contents of the // HIT, or by providing a HitLayoutId and associated HitLayoutParameters. // // If a HIT is created with 10 or more maximum assignments, there is an additional // fee. For more information, see Amazon Mechanical Turk Pricing (https://requester.mturk.com/pricing). // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation CreateHIT for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. 
// // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateHIT func (c *MTurk) CreateHIT(input *CreateHITInput) (*CreateHITOutput, error) { req, out := c.CreateHITRequest(input) return out, req.Send() } // CreateHITWithContext is the same as CreateHIT with the addition of // the ability to pass a context and additional request options. // // See CreateHIT for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) CreateHITWithContext(ctx aws.Context, input *CreateHITInput, opts ...request.Option) (*CreateHITOutput, error) { req, out := c.CreateHITRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opCreateHITType = "CreateHITType" // CreateHITTypeRequest generates a "aws/request.Request" representing the // client's request for the CreateHITType operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See CreateHITType for more information on using the CreateHITType // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the CreateHITTypeRequest method. // req, resp := client.CreateHITTypeRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateHITType func (c *MTurk) CreateHITTypeRequest(input *CreateHITTypeInput) (req *request.Request, output *CreateHITTypeOutput) { op := &request.Operation{ Name: opCreateHITType, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &CreateHITTypeInput{} } output = &CreateHITTypeOutput{} req = c.newRequest(op, input, output) return } // CreateHITType API operation for Amazon Mechanical Turk. // // The CreateHITType operation creates a new HIT type. This operation allows // you to define a standard set of HIT properties to use when creating HITs. // If you register a HIT type with values that match an existing HIT type, the // HIT type ID of the existing type will be returned. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation CreateHITType for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. 
// // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateHITType func (c *MTurk) CreateHITType(input *CreateHITTypeInput) (*CreateHITTypeOutput, error) { req, out := c.CreateHITTypeRequest(input) return out, req.Send() } // CreateHITTypeWithContext is the same as CreateHITType with the addition of // the ability to pass a context and additional request options. // // See CreateHITType for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) CreateHITTypeWithContext(ctx aws.Context, input *CreateHITTypeInput, opts ...request.Option) (*CreateHITTypeOutput, error) { req, out := c.CreateHITTypeRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opCreateHITWithHITType = "CreateHITWithHITType" // CreateHITWithHITTypeRequest generates a "aws/request.Request" representing the // client's request for the CreateHITWithHITType operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See CreateHITWithHITType for more information on using the CreateHITWithHITType // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the CreateHITWithHITTypeRequest method. // req, resp := client.CreateHITWithHITTypeRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateHITWithHITType func (c *MTurk) CreateHITWithHITTypeRequest(input *CreateHITWithHITTypeInput) (req *request.Request, output *CreateHITWithHITTypeOutput) { op := &request.Operation{ Name: opCreateHITWithHITType, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &CreateHITWithHITTypeInput{} } output = &CreateHITWithHITTypeOutput{} req = c.newRequest(op, input, output) return } // CreateHITWithHITType API operation for Amazon Mechanical Turk. // // The CreateHITWithHITType operation creates a new Human Intelligence Task // (HIT) using an existing HITTypeID generated by the CreateHITType operation. // // This is an alternative way to create HITs from the CreateHIT operation. This // is the recommended best practice for Requesters who are creating large numbers // of HITs. // // CreateHITWithHITType also supports several ways to provide question data: // by providing a value for the Question parameter that fully specifies the // contents of the HIT, or by providing a HitLayoutId and associated HitLayoutParameters. // // If a HIT is created with 10 or more maximum assignments, there is an additional // fee. For more information, see Amazon Mechanical Turk Pricing (https://requester.mturk.com/pricing). // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. 
// // See the AWS API reference guide for Amazon Mechanical Turk's // API operation CreateHITWithHITType for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateHITWithHITType func (c *MTurk) CreateHITWithHITType(input *CreateHITWithHITTypeInput) (*CreateHITWithHITTypeOutput, error) { req, out := c.CreateHITWithHITTypeRequest(input) return out, req.Send() } // CreateHITWithHITTypeWithContext is the same as CreateHITWithHITType with the addition of // the ability to pass a context and additional request options. // // See CreateHITWithHITType for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) CreateHITWithHITTypeWithContext(ctx aws.Context, input *CreateHITWithHITTypeInput, opts ...request.Option) (*CreateHITWithHITTypeOutput, error) { req, out := c.CreateHITWithHITTypeRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opCreateQualificationType = "CreateQualificationType" // CreateQualificationTypeRequest generates a "aws/request.Request" representing the // client's request for the CreateQualificationType operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See CreateQualificationType for more information on using the CreateQualificationType // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the CreateQualificationTypeRequest method. // req, resp := client.CreateQualificationTypeRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateQualificationType func (c *MTurk) CreateQualificationTypeRequest(input *CreateQualificationTypeInput) (req *request.Request, output *CreateQualificationTypeOutput) { op := &request.Operation{ Name: opCreateQualificationType, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &CreateQualificationTypeInput{} } output = &CreateQualificationTypeOutput{} req = c.newRequest(op, input, output) return } // CreateQualificationType API operation for Amazon Mechanical Turk. // // The CreateQualificationType operation creates a new Qualification type, which // is represented by a QualificationType data structure. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation CreateQualificationType for usage and error information. 
// // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateQualificationType func (c *MTurk) CreateQualificationType(input *CreateQualificationTypeInput) (*CreateQualificationTypeOutput, error) { req, out := c.CreateQualificationTypeRequest(input) return out, req.Send() } // CreateQualificationTypeWithContext is the same as CreateQualificationType with the addition of // the ability to pass a context and additional request options. // // See CreateQualificationType for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) CreateQualificationTypeWithContext(ctx aws.Context, input *CreateQualificationTypeInput, opts ...request.Option) (*CreateQualificationTypeOutput, error) { req, out := c.CreateQualificationTypeRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opCreateWorkerBlock = "CreateWorkerBlock" // CreateWorkerBlockRequest generates a "aws/request.Request" representing the // client's request for the CreateWorkerBlock operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See CreateWorkerBlock for more information on using the CreateWorkerBlock // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the CreateWorkerBlockRequest method. // req, resp := client.CreateWorkerBlockRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateWorkerBlock func (c *MTurk) CreateWorkerBlockRequest(input *CreateWorkerBlockInput) (req *request.Request, output *CreateWorkerBlockOutput) { op := &request.Operation{ Name: opCreateWorkerBlock, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &CreateWorkerBlockInput{} } output = &CreateWorkerBlockOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // CreateWorkerBlock API operation for Amazon Mechanical Turk. // // The CreateWorkerBlock operation allows you to prevent a Worker from working // on your HITs. For example, you can block a Worker who is producing poor quality // work. You can block up to 100,000 Workers. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation CreateWorkerBlock for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. 
Try
// your call again.
//
//   * RequestError
//   Your request is invalid.
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/CreateWorkerBlock
func (c *MTurk) CreateWorkerBlock(input *CreateWorkerBlockInput) (*CreateWorkerBlockOutput, error) {
	req, out := c.CreateWorkerBlockRequest(input)
	return out, req.Send()
}

// CreateWorkerBlockWithContext is the same as CreateWorkerBlock with the addition of
// the ability to pass a context and additional request options.
//
// See CreateWorkerBlock for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MTurk) CreateWorkerBlockWithContext(ctx aws.Context, input *CreateWorkerBlockInput, opts ...request.Option) (*CreateWorkerBlockOutput, error) {
	req, out := c.CreateWorkerBlockRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

const opDeleteHIT = "DeleteHIT"

// DeleteHITRequest generates a "aws/request.Request" representing the
// client's request for the DeleteHIT operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See DeleteHIT for more information on using the DeleteHIT
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
//    // Example sending a request using the DeleteHITRequest method.
//    req, resp := client.DeleteHITRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/DeleteHIT
func (c *MTurk) DeleteHITRequest(input *DeleteHITInput) (req *request.Request, output *DeleteHITOutput) {
	op := &request.Operation{
		Name:       opDeleteHIT,
		HTTPMethod: "POST",
		HTTPPath:   "/",
	}

	if input == nil {
		input = &DeleteHITInput{}
	}

	output = &DeleteHITOutput{}
	req = c.newRequest(op, input, output)
	req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler)
	return
}

// DeleteHIT API operation for Amazon Mechanical Turk.
//
// The DeleteHIT operation is used to delete a HIT that is no longer needed. Only
// the Requester who created the HIT can delete it.
//
// You can only dispose of HITs that are in the Reviewable state, with all of
// their submitted assignments already either approved or rejected. If you call
// the DeleteHIT operation on a HIT that is not in the Reviewable state (for
// example, that has not expired, or still has active assignments), or on a
// HIT that is Reviewable but without all of its submitted assignments already
// approved or rejected, the service will return an error.
//
//    * HITs are automatically disposed of after 120 days.
//
//    * After you dispose of a HIT, you can no longer approve the HIT's rejected
//    assignments.
//
//    * Disposed HITs are not returned in results for the ListHITs operation.
//
//    * Disposing HITs can improve the performance of operations such as ListReviewableHITs
//    and ListHITs.
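//
// A short sketch (an illustration, not service documentation) of deleting a HIT
// only once it is Reviewable; hitID is an assumed variable holding the HIT's ID:
//
//    hit, err := client.GetHIT(&mturk.GetHITInput{HITId: aws.String(hitID)})
//    if err != nil {
//        return err
//    }
//    if aws.StringValue(hit.HIT.HITStatus) == mturk.HITStatusReviewable {
//        _, err = client.DeleteHIT(&mturk.DeleteHITInput{HITId: aws.String(hitID)})
//    }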
// // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation DeleteHIT for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/DeleteHIT func (c *MTurk) DeleteHIT(input *DeleteHITInput) (*DeleteHITOutput, error) { req, out := c.DeleteHITRequest(input) return out, req.Send() } // DeleteHITWithContext is the same as DeleteHIT with the addition of // the ability to pass a context and additional request options. // // See DeleteHIT for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) DeleteHITWithContext(ctx aws.Context, input *DeleteHITInput, opts ...request.Option) (*DeleteHITOutput, error) { req, out := c.DeleteHITRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opDeleteQualificationType = "DeleteQualificationType" // DeleteQualificationTypeRequest generates a "aws/request.Request" representing the // client's request for the DeleteQualificationType operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See DeleteQualificationType for more information on using the DeleteQualificationType // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the DeleteQualificationTypeRequest method. // req, resp := client.DeleteQualificationTypeRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/DeleteQualificationType func (c *MTurk) DeleteQualificationTypeRequest(input *DeleteQualificationTypeInput) (req *request.Request, output *DeleteQualificationTypeOutput) { op := &request.Operation{ Name: opDeleteQualificationType, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &DeleteQualificationTypeInput{} } output = &DeleteQualificationTypeOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // DeleteQualificationType API operation for Amazon Mechanical Turk. // // The DeleteQualificationType deletes a Qualification type and deletes any // HIT types that are associated with the Qualification type. // // This operation does not revoke Qualifications already assigned to Workers // because the Qualifications might be needed for active HITs. If there are // any pending requests for the Qualification type, Amazon Mechanical Turk rejects // those requests. 
After you delete a Qualification type, you can no longer // use it to create HITs or HIT types. // // DeleteQualificationType must wait for all the HITs that use the deleted Qualification // type to be deleted before completing. It may take up to 48 hours before DeleteQualificationType // completes and the unique name of the Qualification type is available for // reuse with CreateQualificationType. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation DeleteQualificationType for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/DeleteQualificationType func (c *MTurk) DeleteQualificationType(input *DeleteQualificationTypeInput) (*DeleteQualificationTypeOutput, error) { req, out := c.DeleteQualificationTypeRequest(input) return out, req.Send() } // DeleteQualificationTypeWithContext is the same as DeleteQualificationType with the addition of // the ability to pass a context and additional request options. // // See DeleteQualificationType for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) DeleteQualificationTypeWithContext(ctx aws.Context, input *DeleteQualificationTypeInput, opts ...request.Option) (*DeleteQualificationTypeOutput, error) { req, out := c.DeleteQualificationTypeRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opDeleteWorkerBlock = "DeleteWorkerBlock" // DeleteWorkerBlockRequest generates a "aws/request.Request" representing the // client's request for the DeleteWorkerBlock operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See DeleteWorkerBlock for more information on using the DeleteWorkerBlock // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the DeleteWorkerBlockRequest method. 
// req, resp := client.DeleteWorkerBlockRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/DeleteWorkerBlock func (c *MTurk) DeleteWorkerBlockRequest(input *DeleteWorkerBlockInput) (req *request.Request, output *DeleteWorkerBlockOutput) { op := &request.Operation{ Name: opDeleteWorkerBlock, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &DeleteWorkerBlockInput{} } output = &DeleteWorkerBlockOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // DeleteWorkerBlock API operation for Amazon Mechanical Turk. // // The DeleteWorkerBlock operation allows you to reinstate a blocked Worker // to work on your HITs. This operation reverses the effects of the CreateWorkerBlock // operation. You need the Worker ID to use this operation. If the Worker ID // is missing or invalid, this operation fails and returns the message “WorkerId // is invalid.” If the specified Worker is not blocked, this operation returns // successfully. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation DeleteWorkerBlock for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/DeleteWorkerBlock func (c *MTurk) DeleteWorkerBlock(input *DeleteWorkerBlockInput) (*DeleteWorkerBlockOutput, error) { req, out := c.DeleteWorkerBlockRequest(input) return out, req.Send() } // DeleteWorkerBlockWithContext is the same as DeleteWorkerBlock with the addition of // the ability to pass a context and additional request options. // // See DeleteWorkerBlock for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) DeleteWorkerBlockWithContext(ctx aws.Context, input *DeleteWorkerBlockInput, opts ...request.Option) (*DeleteWorkerBlockOutput, error) { req, out := c.DeleteWorkerBlockRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opDisassociateQualificationFromWorker = "DisassociateQualificationFromWorker" // DisassociateQualificationFromWorkerRequest generates a "aws/request.Request" representing the // client's request for the DisassociateQualificationFromWorker operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See DisassociateQualificationFromWorker for more information on using the DisassociateQualificationFromWorker // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. 
Such as custom headers, or retry logic. // // // // Example sending a request using the DisassociateQualificationFromWorkerRequest method. // req, resp := client.DisassociateQualificationFromWorkerRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/DisassociateQualificationFromWorker func (c *MTurk) DisassociateQualificationFromWorkerRequest(input *DisassociateQualificationFromWorkerInput) (req *request.Request, output *DisassociateQualificationFromWorkerOutput) { op := &request.Operation{ Name: opDisassociateQualificationFromWorker, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &DisassociateQualificationFromWorkerInput{} } output = &DisassociateQualificationFromWorkerOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // DisassociateQualificationFromWorker API operation for Amazon Mechanical Turk. // // The DisassociateQualificationFromWorker revokes a previously granted Qualification // from a user. // // You can provide a text message explaining why the Qualification was revoked. // The user who had the Qualification can see this message. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation DisassociateQualificationFromWorker for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/DisassociateQualificationFromWorker func (c *MTurk) DisassociateQualificationFromWorker(input *DisassociateQualificationFromWorkerInput) (*DisassociateQualificationFromWorkerOutput, error) { req, out := c.DisassociateQualificationFromWorkerRequest(input) return out, req.Send() } // DisassociateQualificationFromWorkerWithContext is the same as DisassociateQualificationFromWorker with the addition of // the ability to pass a context and additional request options. // // See DisassociateQualificationFromWorker for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) DisassociateQualificationFromWorkerWithContext(ctx aws.Context, input *DisassociateQualificationFromWorkerInput, opts ...request.Option) (*DisassociateQualificationFromWorkerOutput, error) { req, out := c.DisassociateQualificationFromWorkerRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opGetAccountBalance = "GetAccountBalance" // GetAccountBalanceRequest generates a "aws/request.Request" representing the // client's request for the GetAccountBalance operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. 
// the "output" return value is not valid until after Send returns without error. // // See GetAccountBalance for more information on using the GetAccountBalance // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the GetAccountBalanceRequest method. // req, resp := client.GetAccountBalanceRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetAccountBalance func (c *MTurk) GetAccountBalanceRequest(input *GetAccountBalanceInput) (req *request.Request, output *GetAccountBalanceOutput) { op := &request.Operation{ Name: opGetAccountBalance, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &GetAccountBalanceInput{} } output = &GetAccountBalanceOutput{} req = c.newRequest(op, input, output) return } // GetAccountBalance API operation for Amazon Mechanical Turk. // // The GetAccountBalance operation retrieves the Prepaid HITs balance in your // Amazon Mechanical Turk account if you are a Prepaid Requester. Alternatively, // this operation will retrieve the remaining available AWS Billing usage if // you have enabled AWS Billing. Note: If you have enabled AWS Billing and still // have a remaining Prepaid HITs balance, this balance can be viewed on the // My Account page in the Requester console. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation GetAccountBalance for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetAccountBalance func (c *MTurk) GetAccountBalance(input *GetAccountBalanceInput) (*GetAccountBalanceOutput, error) { req, out := c.GetAccountBalanceRequest(input) return out, req.Send() } // GetAccountBalanceWithContext is the same as GetAccountBalance with the addition of // the ability to pass a context and additional request options. // // See GetAccountBalance for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) GetAccountBalanceWithContext(ctx aws.Context, input *GetAccountBalanceInput, opts ...request.Option) (*GetAccountBalanceOutput, error) { req, out := c.GetAccountBalanceRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opGetAssignment = "GetAssignment" // GetAssignmentRequest generates a "aws/request.Request" representing the // client's request for the GetAssignment operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. 
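//
// A small sketch (author's illustration) of the simpler one-shot wrapper that
// this request method backs, reading a submitted assignment's Answer XML;
// assignmentID is an assumed variable:
//
//    out, err := client.GetAssignment(&mturk.GetAssignmentInput{
//        AssignmentId: aws.String(assignmentID),
//    })
//    if err == nil {
//        fmt.Println(aws.StringValue(out.Assignment.Answer))
//    }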
// // See GetAssignment for more information on using the GetAssignment // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the GetAssignmentRequest method. // req, resp := client.GetAssignmentRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetAssignment func (c *MTurk) GetAssignmentRequest(input *GetAssignmentInput) (req *request.Request, output *GetAssignmentOutput) { op := &request.Operation{ Name: opGetAssignment, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &GetAssignmentInput{} } output = &GetAssignmentOutput{} req = c.newRequest(op, input, output) return } // GetAssignment API operation for Amazon Mechanical Turk. // // The GetAssignment operation retrieves the details of the specified Assignment. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation GetAssignment for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetAssignment func (c *MTurk) GetAssignment(input *GetAssignmentInput) (*GetAssignmentOutput, error) { req, out := c.GetAssignmentRequest(input) return out, req.Send() } // GetAssignmentWithContext is the same as GetAssignment with the addition of // the ability to pass a context and additional request options. // // See GetAssignment for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) GetAssignmentWithContext(ctx aws.Context, input *GetAssignmentInput, opts ...request.Option) (*GetAssignmentOutput, error) { req, out := c.GetAssignmentRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opGetFileUploadURL = "GetFileUploadURL" // GetFileUploadURLRequest generates a "aws/request.Request" representing the // client's request for the GetFileUploadURL operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See GetFileUploadURL for more information on using the GetFileUploadURL // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the GetFileUploadURLRequest method. 
// req, resp := client.GetFileUploadURLRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetFileUploadURL func (c *MTurk) GetFileUploadURLRequest(input *GetFileUploadURLInput) (req *request.Request, output *GetFileUploadURLOutput) { op := &request.Operation{ Name: opGetFileUploadURL, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &GetFileUploadURLInput{} } output = &GetFileUploadURLOutput{} req = c.newRequest(op, input, output) return } // GetFileUploadURL API operation for Amazon Mechanical Turk. // // The GetFileUploadURL operation generates and returns a temporary URL. You // use the temporary URL to retrieve a file uploaded by a Worker as an answer // to a FileUploadAnswer question for a HIT. The temporary URL is generated // the instant the GetFileUploadURL operation is called, and is valid for 60 // seconds. You can get a temporary file upload URL any time until the HIT is // disposed. After the HIT is disposed, any uploaded files are deleted, and // cannot be retrieved. Pending Deprecation on December 12, 2017. The Answer // Specification structure will no longer support the FileUploadAnswer element // to be used for the QuestionForm data structure. Instead, we recommend that // Requesters who want to create HITs asking Workers to upload files to use // Amazon S3. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation GetFileUploadURL for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetFileUploadURL func (c *MTurk) GetFileUploadURL(input *GetFileUploadURLInput) (*GetFileUploadURLOutput, error) { req, out := c.GetFileUploadURLRequest(input) return out, req.Send() } // GetFileUploadURLWithContext is the same as GetFileUploadURL with the addition of // the ability to pass a context and additional request options. // // See GetFileUploadURL for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) GetFileUploadURLWithContext(ctx aws.Context, input *GetFileUploadURLInput, opts ...request.Option) (*GetFileUploadURLOutput, error) { req, out := c.GetFileUploadURLRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opGetHIT = "GetHIT" // GetHITRequest generates a "aws/request.Request" representing the // client's request for the GetHIT operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See GetHIT for more information on using the GetHIT // API call, and error handling. 
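//
// A brief sketch (illustrative only) of the one-shot wrapper that this request
// method backs; hitID is an assumed variable:
//
//    out, err := client.GetHIT(&mturk.GetHITInput{HITId: aws.String(hitID)})
//    if err == nil {
//        fmt.Println(aws.StringValue(out.HIT.HITStatus))
//    }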
// // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the GetHITRequest method. // req, resp := client.GetHITRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetHIT func (c *MTurk) GetHITRequest(input *GetHITInput) (req *request.Request, output *GetHITOutput) { op := &request.Operation{ Name: opGetHIT, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &GetHITInput{} } output = &GetHITOutput{} req = c.newRequest(op, input, output) return } // GetHIT API operation for Amazon Mechanical Turk. // // The GetHIT operation retrieves the details of the specified HIT. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation GetHIT for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetHIT func (c *MTurk) GetHIT(input *GetHITInput) (*GetHITOutput, error) { req, out := c.GetHITRequest(input) return out, req.Send() } // GetHITWithContext is the same as GetHIT with the addition of // the ability to pass a context and additional request options. // // See GetHIT for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) GetHITWithContext(ctx aws.Context, input *GetHITInput, opts ...request.Option) (*GetHITOutput, error) { req, out := c.GetHITRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opGetQualificationScore = "GetQualificationScore" // GetQualificationScoreRequest generates a "aws/request.Request" representing the // client's request for the GetQualificationScore operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See GetQualificationScore for more information on using the GetQualificationScore // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the GetQualificationScoreRequest method. 
// req, resp := client.GetQualificationScoreRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetQualificationScore func (c *MTurk) GetQualificationScoreRequest(input *GetQualificationScoreInput) (req *request.Request, output *GetQualificationScoreOutput) { op := &request.Operation{ Name: opGetQualificationScore, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &GetQualificationScoreInput{} } output = &GetQualificationScoreOutput{} req = c.newRequest(op, input, output) return } // GetQualificationScore API operation for Amazon Mechanical Turk. // // The GetQualificationScore operation returns the value of a Worker's Qualification // for a given Qualification type. // // To get a Worker's Qualification, you must know the Worker's ID. The Worker's // ID is included in the assignment data returned by the ListAssignmentsForHIT // operation. // // Only the owner of a Qualification type can query the value of a Worker's // Qualification of that type. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation GetQualificationScore for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetQualificationScore func (c *MTurk) GetQualificationScore(input *GetQualificationScoreInput) (*GetQualificationScoreOutput, error) { req, out := c.GetQualificationScoreRequest(input) return out, req.Send() } // GetQualificationScoreWithContext is the same as GetQualificationScore with the addition of // the ability to pass a context and additional request options. // // See GetQualificationScore for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) GetQualificationScoreWithContext(ctx aws.Context, input *GetQualificationScoreInput, opts ...request.Option) (*GetQualificationScoreOutput, error) { req, out := c.GetQualificationScoreRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opGetQualificationType = "GetQualificationType" // GetQualificationTypeRequest generates a "aws/request.Request" representing the // client's request for the GetQualificationType operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See GetQualificationType for more information on using the GetQualificationType // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. 
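//
// A hedged sketch of looking up a Qualification type by ID through the one-shot
// wrapper and reading its status; qualTypeID is an assumed variable:
//
//    out, err := client.GetQualificationType(&mturk.GetQualificationTypeInput{
//        QualificationTypeId: aws.String(qualTypeID),
//    })
//    if err == nil {
//        fmt.Println(aws.StringValue(out.QualificationType.QualificationTypeStatus))
//    }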
//
//
//    // Example sending a request using the GetQualificationTypeRequest method.
//    req, resp := client.GetQualificationTypeRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetQualificationType
func (c *MTurk) GetQualificationTypeRequest(input *GetQualificationTypeInput) (req *request.Request, output *GetQualificationTypeOutput) {
	op := &request.Operation{
		Name:       opGetQualificationType,
		HTTPMethod: "POST",
		HTTPPath:   "/",
	}

	if input == nil {
		input = &GetQualificationTypeInput{}
	}

	output = &GetQualificationTypeOutput{}
	req = c.newRequest(op, input, output)
	return
}

// GetQualificationType API operation for Amazon Mechanical Turk.
//
// The GetQualificationType operation retrieves information about a Qualification
// type using its ID.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Mechanical Turk's
// API operation GetQualificationType for usage and error information.
//
// Returned Error Types:
//   * ServiceFault
//   Amazon Mechanical Turk is temporarily unable to process your request. Try
//   your call again.
//
//   * RequestError
//   Your request is invalid.
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/GetQualificationType
func (c *MTurk) GetQualificationType(input *GetQualificationTypeInput) (*GetQualificationTypeOutput, error) {
	req, out := c.GetQualificationTypeRequest(input)
	return out, req.Send()
}

// GetQualificationTypeWithContext is the same as GetQualificationType with the addition of
// the ability to pass a context and additional request options.
//
// See GetQualificationType for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MTurk) GetQualificationTypeWithContext(ctx aws.Context, input *GetQualificationTypeInput, opts ...request.Option) (*GetQualificationTypeOutput, error) {
	req, out := c.GetQualificationTypeRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

const opListAssignmentsForHIT = "ListAssignmentsForHIT"

// ListAssignmentsForHITRequest generates a "aws/request.Request" representing the
// client's request for the ListAssignmentsForHIT operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See ListAssignmentsForHIT for more information on using the ListAssignmentsForHIT
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
//    // Example sending a request using the ListAssignmentsForHITRequest method.
// req, resp := client.ListAssignmentsForHITRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListAssignmentsForHIT func (c *MTurk) ListAssignmentsForHITRequest(input *ListAssignmentsForHITInput) (req *request.Request, output *ListAssignmentsForHITOutput) { op := &request.Operation{ Name: opListAssignmentsForHIT, HTTPMethod: "POST", HTTPPath: "/", Paginator: &request.Paginator{ InputTokens: []string{"NextToken"}, OutputTokens: []string{"NextToken"}, LimitToken: "MaxResults", TruncationToken: "", }, } if input == nil { input = &ListAssignmentsForHITInput{} } output = &ListAssignmentsForHITOutput{} req = c.newRequest(op, input, output) return } // ListAssignmentsForHIT API operation for Amazon Mechanical Turk. // // The ListAssignmentsForHIT operation retrieves completed assignments for a // HIT. You can use this operation to retrieve the results for a HIT. // // You can get assignments for a HIT at any time, even if the HIT is not yet // Reviewable. If a HIT requested multiple assignments, and has received some // results but has not yet become Reviewable, you can still retrieve the partial // results with this operation. // // Use the AssignmentStatus parameter to control which set of assignments for // a HIT are returned. The ListAssignmentsForHIT operation can return submitted // assignments awaiting approval, or it can return assignments that have already // been approved or rejected. You can set AssignmentStatus=Approved,Rejected // to get assignments that have already been approved and rejected together // in one result set. // // Only the Requester who created the HIT can retrieve the assignments for that // HIT. // // Results are sorted and divided into numbered pages and the operation returns // a single page of results. You can use the parameters of the operation to // control sorting and pagination. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ListAssignmentsForHIT for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListAssignmentsForHIT func (c *MTurk) ListAssignmentsForHIT(input *ListAssignmentsForHITInput) (*ListAssignmentsForHITOutput, error) { req, out := c.ListAssignmentsForHITRequest(input) return out, req.Send() } // ListAssignmentsForHITWithContext is the same as ListAssignmentsForHIT with the addition of // the ability to pass a context and additional request options. // // See ListAssignmentsForHIT for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListAssignmentsForHITWithContext(ctx aws.Context, input *ListAssignmentsForHITInput, opts ...request.Option) (*ListAssignmentsForHITOutput, error) { req, out := c.ListAssignmentsForHITRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) 
return out, req.Send() } // ListAssignmentsForHITPages iterates over the pages of a ListAssignmentsForHIT operation, // calling the "fn" function with the response data for each page. To stop // iterating, return false from the fn function. // // See ListAssignmentsForHIT method for more information on how to use this operation. // // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListAssignmentsForHIT operation. // pageNum := 0 // err := client.ListAssignmentsForHITPages(params, // func(page *mturk.ListAssignmentsForHITOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListAssignmentsForHITPages(input *ListAssignmentsForHITInput, fn func(*ListAssignmentsForHITOutput, bool) bool) error { return c.ListAssignmentsForHITPagesWithContext(aws.BackgroundContext(), input, fn) } // ListAssignmentsForHITPagesWithContext same as ListAssignmentsForHITPages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListAssignmentsForHITPagesWithContext(ctx aws.Context, input *ListAssignmentsForHITInput, fn func(*ListAssignmentsForHITOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListAssignmentsForHITInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListAssignmentsForHITRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) return req, nil }, } for p.Next() { if !fn(p.Page().(*ListAssignmentsForHITOutput), !p.HasNextPage()) { break } } return p.Err() } const opListBonusPayments = "ListBonusPayments" // ListBonusPaymentsRequest generates a "aws/request.Request" representing the // client's request for the ListBonusPayments operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ListBonusPayments for more information on using the ListBonusPayments // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ListBonusPaymentsRequest method. // req, resp := client.ListBonusPaymentsRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListBonusPayments func (c *MTurk) ListBonusPaymentsRequest(input *ListBonusPaymentsInput) (req *request.Request, output *ListBonusPaymentsOutput) { op := &request.Operation{ Name: opListBonusPayments, HTTPMethod: "POST", HTTPPath: "/", Paginator: &request.Paginator{ InputTokens: []string{"NextToken"}, OutputTokens: []string{"NextToken"}, LimitToken: "MaxResults", TruncationToken: "", }, } if input == nil { input = &ListBonusPaymentsInput{} } output = &ListBonusPaymentsOutput{} req = c.newRequest(op, input, output) return } // ListBonusPayments API operation for Amazon Mechanical Turk. 
// // The ListBonusPayments operation retrieves the amounts of bonuses you have // paid to Workers for a given HIT or assignment. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ListBonusPayments for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListBonusPayments func (c *MTurk) ListBonusPayments(input *ListBonusPaymentsInput) (*ListBonusPaymentsOutput, error) { req, out := c.ListBonusPaymentsRequest(input) return out, req.Send() } // ListBonusPaymentsWithContext is the same as ListBonusPayments with the addition of // the ability to pass a context and additional request options. // // See ListBonusPayments for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListBonusPaymentsWithContext(ctx aws.Context, input *ListBonusPaymentsInput, opts ...request.Option) (*ListBonusPaymentsOutput, error) { req, out := c.ListBonusPaymentsRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } // ListBonusPaymentsPages iterates over the pages of a ListBonusPayments operation, // calling the "fn" function with the response data for each page. To stop // iterating, return false from the fn function. // // See ListBonusPayments method for more information on how to use this operation. // // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListBonusPayments operation. // pageNum := 0 // err := client.ListBonusPaymentsPages(params, // func(page *mturk.ListBonusPaymentsOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListBonusPaymentsPages(input *ListBonusPaymentsInput, fn func(*ListBonusPaymentsOutput, bool) bool) error { return c.ListBonusPaymentsPagesWithContext(aws.BackgroundContext(), input, fn) } // ListBonusPaymentsPagesWithContext same as ListBonusPaymentsPages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListBonusPaymentsPagesWithContext(ctx aws.Context, input *ListBonusPaymentsInput, fn func(*ListBonusPaymentsOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListBonusPaymentsInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListBonusPaymentsRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) 
	return req, nil
		},
	}

	for p.Next() {
		if !fn(p.Page().(*ListBonusPaymentsOutput), !p.HasNextPage()) {
			break
		}
	}

	return p.Err()
}

const opListHITs = "ListHITs"

// ListHITsRequest generates a "aws/request.Request" representing the
// client's request for the ListHITs operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See ListHITs for more information on using the ListHITs
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
//    // Example sending a request using the ListHITsRequest method.
//    req, resp := client.ListHITsRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListHITs
func (c *MTurk) ListHITsRequest(input *ListHITsInput) (req *request.Request, output *ListHITsOutput) {
	op := &request.Operation{
		Name:       opListHITs,
		HTTPMethod: "POST",
		HTTPPath:   "/",
		Paginator: &request.Paginator{
			InputTokens:     []string{"NextToken"},
			OutputTokens:    []string{"NextToken"},
			LimitToken:      "MaxResults",
			TruncationToken: "",
		},
	}

	if input == nil {
		input = &ListHITsInput{}
	}

	output = &ListHITsOutput{}
	req = c.newRequest(op, input, output)
	return
}

// ListHITs API operation for Amazon Mechanical Turk.
//
// The ListHITs operation returns all of a Requester's HITs. The operation returns
// HITs of any status, except for HITs that have been deleted with the DeleteHIT
// operation or that have been auto-deleted.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Mechanical Turk's
// API operation ListHITs for usage and error information.
//
// Returned Error Types:
//   * ServiceFault
//   Amazon Mechanical Turk is temporarily unable to process your request. Try
//   your call again.
//
//   * RequestError
//   Your request is invalid.
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListHITs
func (c *MTurk) ListHITs(input *ListHITsInput) (*ListHITsOutput, error) {
	req, out := c.ListHITsRequest(input)
	return out, req.Send()
}

// ListHITsWithContext is the same as ListHITs with the addition of
// the ability to pass a context and additional request options.
//
// See ListHITs for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MTurk) ListHITsWithContext(ctx aws.Context, input *ListHITsInput, opts ...request.Option) (*ListHITsOutput, error) {
	req, out := c.ListHITsRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

// ListHITsPages iterates over the pages of a ListHITs operation,
// calling the "fn" function with the response data for each page. To stop
// iterating, return false from the fn function.
//
// See ListHITs method for more information on how to use this operation.
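//
// A small sketch (an illustration, not generated documentation) of collecting
// every HIT ID across all pages:
//
//    var hitIDs []string
//    err := client.ListHITsPages(&mturk.ListHITsInput{},
//        func(page *mturk.ListHITsOutput, lastPage bool) bool {
//            for _, h := range page.HITs {
//                hitIDs = append(hitIDs, aws.StringValue(h.HITId))
//            }
//            return true // keep paging until the last page
//        })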
// // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListHITs operation. // pageNum := 0 // err := client.ListHITsPages(params, // func(page *mturk.ListHITsOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListHITsPages(input *ListHITsInput, fn func(*ListHITsOutput, bool) bool) error { return c.ListHITsPagesWithContext(aws.BackgroundContext(), input, fn) } // ListHITsPagesWithContext same as ListHITsPages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListHITsPagesWithContext(ctx aws.Context, input *ListHITsInput, fn func(*ListHITsOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListHITsInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListHITsRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) return req, nil }, } for p.Next() { if !fn(p.Page().(*ListHITsOutput), !p.HasNextPage()) { break } } return p.Err() } const opListHITsForQualificationType = "ListHITsForQualificationType" // ListHITsForQualificationTypeRequest generates a "aws/request.Request" representing the // client's request for the ListHITsForQualificationType operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ListHITsForQualificationType for more information on using the ListHITsForQualificationType // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ListHITsForQualificationTypeRequest method. // req, resp := client.ListHITsForQualificationTypeRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListHITsForQualificationType func (c *MTurk) ListHITsForQualificationTypeRequest(input *ListHITsForQualificationTypeInput) (req *request.Request, output *ListHITsForQualificationTypeOutput) { op := &request.Operation{ Name: opListHITsForQualificationType, HTTPMethod: "POST", HTTPPath: "/", Paginator: &request.Paginator{ InputTokens: []string{"NextToken"}, OutputTokens: []string{"NextToken"}, LimitToken: "MaxResults", TruncationToken: "", }, } if input == nil { input = &ListHITsForQualificationTypeInput{} } output = &ListHITsForQualificationTypeOutput{} req = c.newRequest(op, input, output) return } // ListHITsForQualificationType API operation for Amazon Mechanical Turk. // // The ListHITsForQualificationType operation returns the HITs that use the // given Qualification type for a Qualification requirement. The operation returns // HITs of any status, except for HITs that have been deleted with the DeleteHIT // operation or that have been auto-deleted. // // Returns awserr.Error for service API and SDK errors. 
Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ListHITsForQualificationType for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListHITsForQualificationType func (c *MTurk) ListHITsForQualificationType(input *ListHITsForQualificationTypeInput) (*ListHITsForQualificationTypeOutput, error) { req, out := c.ListHITsForQualificationTypeRequest(input) return out, req.Send() } // ListHITsForQualificationTypeWithContext is the same as ListHITsForQualificationType with the addition of // the ability to pass a context and additional request options. // // See ListHITsForQualificationType for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListHITsForQualificationTypeWithContext(ctx aws.Context, input *ListHITsForQualificationTypeInput, opts ...request.Option) (*ListHITsForQualificationTypeOutput, error) { req, out := c.ListHITsForQualificationTypeRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } // ListHITsForQualificationTypePages iterates over the pages of a ListHITsForQualificationType operation, // calling the "fn" function with the response data for each page. To stop // iterating, return false from the fn function. // // See ListHITsForQualificationType method for more information on how to use this operation. // // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListHITsForQualificationType operation. // pageNum := 0 // err := client.ListHITsForQualificationTypePages(params, // func(page *mturk.ListHITsForQualificationTypeOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListHITsForQualificationTypePages(input *ListHITsForQualificationTypeInput, fn func(*ListHITsForQualificationTypeOutput, bool) bool) error { return c.ListHITsForQualificationTypePagesWithContext(aws.BackgroundContext(), input, fn) } // ListHITsForQualificationTypePagesWithContext same as ListHITsForQualificationTypePages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListHITsForQualificationTypePagesWithContext(ctx aws.Context, input *ListHITsForQualificationTypeInput, fn func(*ListHITsForQualificationTypeOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListHITsForQualificationTypeInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListHITsForQualificationTypeRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) 
return req, nil }, } for p.Next() { if !fn(p.Page().(*ListHITsForQualificationTypeOutput), !p.HasNextPage()) { break } } return p.Err() } const opListQualificationRequests = "ListQualificationRequests" // ListQualificationRequestsRequest generates a "aws/request.Request" representing the // client's request for the ListQualificationRequests operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ListQualificationRequests for more information on using the ListQualificationRequests // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ListQualificationRequestsRequest method. // req, resp := client.ListQualificationRequestsRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListQualificationRequests func (c *MTurk) ListQualificationRequestsRequest(input *ListQualificationRequestsInput) (req *request.Request, output *ListQualificationRequestsOutput) { op := &request.Operation{ Name: opListQualificationRequests, HTTPMethod: "POST", HTTPPath: "/", Paginator: &request.Paginator{ InputTokens: []string{"NextToken"}, OutputTokens: []string{"NextToken"}, LimitToken: "MaxResults", TruncationToken: "", }, } if input == nil { input = &ListQualificationRequestsInput{} } output = &ListQualificationRequestsOutput{} req = c.newRequest(op, input, output) return } // ListQualificationRequests API operation for Amazon Mechanical Turk. // // The ListQualificationRequests operation retrieves requests for Qualifications // of a particular Qualification type. The owner of the Qualification type calls // this operation to poll for pending requests, and accepts them using the AcceptQualification // operation. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ListQualificationRequests for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListQualificationRequests func (c *MTurk) ListQualificationRequests(input *ListQualificationRequestsInput) (*ListQualificationRequestsOutput, error) { req, out := c.ListQualificationRequestsRequest(input) return out, req.Send() } // ListQualificationRequestsWithContext is the same as ListQualificationRequests with the addition of // the ability to pass a context and additional request options. // // See ListQualificationRequests for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. 
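//
// As an illustrative sketch only (the one-minute timeout and the
// QualificationTypeId value below are assumed placeholders):
//
//    ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
//    defer cancel()
//
//    out, err := client.ListQualificationRequestsWithContext(ctx, &mturk.ListQualificationRequestsInput{
//        QualificationTypeId: aws.String("qualification-type-id"),
//    })
//    if err != nil {
//        // handle the error; a canceled or expired context surfaces here
//    }
//    fmt.Println(out)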
func (c *MTurk) ListQualificationRequestsWithContext(ctx aws.Context, input *ListQualificationRequestsInput, opts ...request.Option) (*ListQualificationRequestsOutput, error) { req, out := c.ListQualificationRequestsRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } // ListQualificationRequestsPages iterates over the pages of a ListQualificationRequests operation, // calling the "fn" function with the response data for each page. To stop // iterating, return false from the fn function. // // See ListQualificationRequests method for more information on how to use this operation. // // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListQualificationRequests operation. // pageNum := 0 // err := client.ListQualificationRequestsPages(params, // func(page *mturk.ListQualificationRequestsOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListQualificationRequestsPages(input *ListQualificationRequestsInput, fn func(*ListQualificationRequestsOutput, bool) bool) error { return c.ListQualificationRequestsPagesWithContext(aws.BackgroundContext(), input, fn) } // ListQualificationRequestsPagesWithContext same as ListQualificationRequestsPages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListQualificationRequestsPagesWithContext(ctx aws.Context, input *ListQualificationRequestsInput, fn func(*ListQualificationRequestsOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListQualificationRequestsInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListQualificationRequestsRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) return req, nil }, } for p.Next() { if !fn(p.Page().(*ListQualificationRequestsOutput), !p.HasNextPage()) { break } } return p.Err() } const opListQualificationTypes = "ListQualificationTypes" // ListQualificationTypesRequest generates a "aws/request.Request" representing the // client's request for the ListQualificationTypes operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ListQualificationTypes for more information on using the ListQualificationTypes // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ListQualificationTypesRequest method. 
// req, resp := client.ListQualificationTypesRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListQualificationTypes func (c *MTurk) ListQualificationTypesRequest(input *ListQualificationTypesInput) (req *request.Request, output *ListQualificationTypesOutput) { op := &request.Operation{ Name: opListQualificationTypes, HTTPMethod: "POST", HTTPPath: "/", Paginator: &request.Paginator{ InputTokens: []string{"NextToken"}, OutputTokens: []string{"NextToken"}, LimitToken: "MaxResults", TruncationToken: "", }, } if input == nil { input = &ListQualificationTypesInput{} } output = &ListQualificationTypesOutput{} req = c.newRequest(op, input, output) return } // ListQualificationTypes API operation for Amazon Mechanical Turk. // // The ListQualificationTypes operation returns a list of Qualification types, // filtered by an optional search term. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ListQualificationTypes for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListQualificationTypes func (c *MTurk) ListQualificationTypes(input *ListQualificationTypesInput) (*ListQualificationTypesOutput, error) { req, out := c.ListQualificationTypesRequest(input) return out, req.Send() } // ListQualificationTypesWithContext is the same as ListQualificationTypes with the addition of // the ability to pass a context and additional request options. // // See ListQualificationTypes for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListQualificationTypesWithContext(ctx aws.Context, input *ListQualificationTypesInput, opts ...request.Option) (*ListQualificationTypesOutput, error) { req, out := c.ListQualificationTypesRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } // ListQualificationTypesPages iterates over the pages of a ListQualificationTypes operation, // calling the "fn" function with the response data for each page. To stop // iterating, return false from the fn function. // // See ListQualificationTypes method for more information on how to use this operation. // // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListQualificationTypes operation. 
// pageNum := 0 // err := client.ListQualificationTypesPages(params, // func(page *mturk.ListQualificationTypesOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListQualificationTypesPages(input *ListQualificationTypesInput, fn func(*ListQualificationTypesOutput, bool) bool) error { return c.ListQualificationTypesPagesWithContext(aws.BackgroundContext(), input, fn) } // ListQualificationTypesPagesWithContext same as ListQualificationTypesPages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListQualificationTypesPagesWithContext(ctx aws.Context, input *ListQualificationTypesInput, fn func(*ListQualificationTypesOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListQualificationTypesInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListQualificationTypesRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) return req, nil }, } for p.Next() { if !fn(p.Page().(*ListQualificationTypesOutput), !p.HasNextPage()) { break } } return p.Err() } const opListReviewPolicyResultsForHIT = "ListReviewPolicyResultsForHIT" // ListReviewPolicyResultsForHITRequest generates a "aws/request.Request" representing the // client's request for the ListReviewPolicyResultsForHIT operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ListReviewPolicyResultsForHIT for more information on using the ListReviewPolicyResultsForHIT // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ListReviewPolicyResultsForHITRequest method. // req, resp := client.ListReviewPolicyResultsForHITRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListReviewPolicyResultsForHIT func (c *MTurk) ListReviewPolicyResultsForHITRequest(input *ListReviewPolicyResultsForHITInput) (req *request.Request, output *ListReviewPolicyResultsForHITOutput) { op := &request.Operation{ Name: opListReviewPolicyResultsForHIT, HTTPMethod: "POST", HTTPPath: "/", Paginator: &request.Paginator{ InputTokens: []string{"NextToken"}, OutputTokens: []string{"NextToken"}, LimitToken: "MaxResults", TruncationToken: "", }, } if input == nil { input = &ListReviewPolicyResultsForHITInput{} } output = &ListReviewPolicyResultsForHITOutput{} req = c.newRequest(op, input, output) return } // ListReviewPolicyResultsForHIT API operation for Amazon Mechanical Turk. // // The ListReviewPolicyResultsForHIT operation retrieves the computed results // and the actions taken in the course of executing your Review Policies for // a given HIT. For information about how to specify Review Policies when you // call CreateHIT, see Review Policies. 
The ListReviewPolicyResultsForHIT operation // can return results for both Assignment-level and HIT-level review results. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ListReviewPolicyResultsForHIT for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListReviewPolicyResultsForHIT func (c *MTurk) ListReviewPolicyResultsForHIT(input *ListReviewPolicyResultsForHITInput) (*ListReviewPolicyResultsForHITOutput, error) { req, out := c.ListReviewPolicyResultsForHITRequest(input) return out, req.Send() } // ListReviewPolicyResultsForHITWithContext is the same as ListReviewPolicyResultsForHIT with the addition of // the ability to pass a context and additional request options. // // See ListReviewPolicyResultsForHIT for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListReviewPolicyResultsForHITWithContext(ctx aws.Context, input *ListReviewPolicyResultsForHITInput, opts ...request.Option) (*ListReviewPolicyResultsForHITOutput, error) { req, out := c.ListReviewPolicyResultsForHITRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } // ListReviewPolicyResultsForHITPages iterates over the pages of a ListReviewPolicyResultsForHIT operation, // calling the "fn" function with the response data for each page. To stop // iterating, return false from the fn function. // // See ListReviewPolicyResultsForHIT method for more information on how to use this operation. // // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListReviewPolicyResultsForHIT operation. // pageNum := 0 // err := client.ListReviewPolicyResultsForHITPages(params, // func(page *mturk.ListReviewPolicyResultsForHITOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListReviewPolicyResultsForHITPages(input *ListReviewPolicyResultsForHITInput, fn func(*ListReviewPolicyResultsForHITOutput, bool) bool) error { return c.ListReviewPolicyResultsForHITPagesWithContext(aws.BackgroundContext(), input, fn) } // ListReviewPolicyResultsForHITPagesWithContext same as ListReviewPolicyResultsForHITPages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. 
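//
// A rough sketch (ctx and the HIT ID are assumed placeholders; HITReviewReport
// is one of the report fields on this package's output type):
//
//    err := client.ListReviewPolicyResultsForHITPagesWithContext(ctx,
//        &mturk.ListReviewPolicyResultsForHITInput{
//            HITId: aws.String("hit-id"),
//        },
//        func(page *mturk.ListReviewPolicyResultsForHITOutput, lastPage bool) bool {
//            fmt.Println(page.HITReviewReport)
//            return true
//        })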
func (c *MTurk) ListReviewPolicyResultsForHITPagesWithContext(ctx aws.Context, input *ListReviewPolicyResultsForHITInput, fn func(*ListReviewPolicyResultsForHITOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListReviewPolicyResultsForHITInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListReviewPolicyResultsForHITRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) return req, nil }, } for p.Next() { if !fn(p.Page().(*ListReviewPolicyResultsForHITOutput), !p.HasNextPage()) { break } } return p.Err() } const opListReviewableHITs = "ListReviewableHITs" // ListReviewableHITsRequest generates a "aws/request.Request" representing the // client's request for the ListReviewableHITs operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ListReviewableHITs for more information on using the ListReviewableHITs // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ListReviewableHITsRequest method. // req, resp := client.ListReviewableHITsRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListReviewableHITs func (c *MTurk) ListReviewableHITsRequest(input *ListReviewableHITsInput) (req *request.Request, output *ListReviewableHITsOutput) { op := &request.Operation{ Name: opListReviewableHITs, HTTPMethod: "POST", HTTPPath: "/", Paginator: &request.Paginator{ InputTokens: []string{"NextToken"}, OutputTokens: []string{"NextToken"}, LimitToken: "MaxResults", TruncationToken: "", }, } if input == nil { input = &ListReviewableHITsInput{} } output = &ListReviewableHITsOutput{} req = c.newRequest(op, input, output) return } // ListReviewableHITs API operation for Amazon Mechanical Turk. // // The ListReviewableHITs operation retrieves the HITs with Status equal to // Reviewable or Status equal to Reviewing that belong to the Requester calling // the operation. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ListReviewableHITs for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListReviewableHITs func (c *MTurk) ListReviewableHITs(input *ListReviewableHITsInput) (*ListReviewableHITsOutput, error) { req, out := c.ListReviewableHITsRequest(input) return out, req.Send() } // ListReviewableHITsWithContext is the same as ListReviewableHITs with the addition of // the ability to pass a context and additional request options. // // See ListReviewableHITs for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. 
If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListReviewableHITsWithContext(ctx aws.Context, input *ListReviewableHITsInput, opts ...request.Option) (*ListReviewableHITsOutput, error) { req, out := c.ListReviewableHITsRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } // ListReviewableHITsPages iterates over the pages of a ListReviewableHITs operation, // calling the "fn" function with the response data for each page. To stop // iterating, return false from the fn function. // // See ListReviewableHITs method for more information on how to use this operation. // // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListReviewableHITs operation. // pageNum := 0 // err := client.ListReviewableHITsPages(params, // func(page *mturk.ListReviewableHITsOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListReviewableHITsPages(input *ListReviewableHITsInput, fn func(*ListReviewableHITsOutput, bool) bool) error { return c.ListReviewableHITsPagesWithContext(aws.BackgroundContext(), input, fn) } // ListReviewableHITsPagesWithContext same as ListReviewableHITsPages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListReviewableHITsPagesWithContext(ctx aws.Context, input *ListReviewableHITsInput, fn func(*ListReviewableHITsOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListReviewableHITsInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListReviewableHITsRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) return req, nil }, } for p.Next() { if !fn(p.Page().(*ListReviewableHITsOutput), !p.HasNextPage()) { break } } return p.Err() } const opListWorkerBlocks = "ListWorkerBlocks" // ListWorkerBlocksRequest generates a "aws/request.Request" representing the // client's request for the ListWorkerBlocks operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ListWorkerBlocks for more information on using the ListWorkerBlocks // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ListWorkerBlocksRequest method. 
//    req, resp := client.ListWorkerBlocksRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListWorkerBlocks
func (c *MTurk) ListWorkerBlocksRequest(input *ListWorkerBlocksInput) (req *request.Request, output *ListWorkerBlocksOutput) {
	op := &request.Operation{
		Name:       opListWorkerBlocks,
		HTTPMethod: "POST",
		HTTPPath:   "/",
		Paginator: &request.Paginator{
			InputTokens:     []string{"NextToken"},
			OutputTokens:    []string{"NextToken"},
			LimitToken:      "MaxResults",
			TruncationToken: "",
		},
	}

	if input == nil {
		input = &ListWorkerBlocksInput{}
	}

	output = &ListWorkerBlocksOutput{}
	req = c.newRequest(op, input, output)
	return
}

// ListWorkerBlocks API operation for Amazon Mechanical Turk.
//
// The ListWorkerBlocks operation retrieves a list of Workers who are blocked
// from working on your HITs.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Mechanical Turk's
// API operation ListWorkerBlocks for usage and error information.
//
// Returned Error Types:
//   * ServiceFault
//   Amazon Mechanical Turk is temporarily unable to process your request. Try
//   your call again.
//
//   * RequestError
//   Your request is invalid.
//
// See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListWorkerBlocks
func (c *MTurk) ListWorkerBlocks(input *ListWorkerBlocksInput) (*ListWorkerBlocksOutput, error) {
	req, out := c.ListWorkerBlocksRequest(input)
	return out, req.Send()
}

// ListWorkerBlocksWithContext is the same as ListWorkerBlocks with the addition of
// the ability to pass a context and additional request options.
//
// See ListWorkerBlocks for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *MTurk) ListWorkerBlocksWithContext(ctx aws.Context, input *ListWorkerBlocksInput, opts ...request.Option) (*ListWorkerBlocksOutput, error) {
	req, out := c.ListWorkerBlocksRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}

// ListWorkerBlocksPages iterates over the pages of a ListWorkerBlocks operation,
// calling the "fn" function with the response data for each page. To stop
// iterating, return false from the fn function.
//
// See ListWorkerBlocks method for more information on how to use this operation.
//
// Note: This operation can generate multiple requests to a service.
//
//    // Example iterating over at most 3 pages of a ListWorkerBlocks operation.
//    pageNum := 0
//    err := client.ListWorkerBlocksPages(params,
//        func(page *mturk.ListWorkerBlocksOutput, lastPage bool) bool {
//            pageNum++
//            fmt.Println(page)
//            return pageNum <= 3
//        })
//
func (c *MTurk) ListWorkerBlocksPages(input *ListWorkerBlocksInput, fn func(*ListWorkerBlocksOutput, bool) bool) error {
	return c.ListWorkerBlocksPagesWithContext(aws.BackgroundContext(), input, fn)
}

// ListWorkerBlocksPagesWithContext same as ListWorkerBlocksPages except
// it takes a Context and allows setting request options on the pages.
//
// The context must be non-nil and will be used for request cancellation.
If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListWorkerBlocksPagesWithContext(ctx aws.Context, input *ListWorkerBlocksInput, fn func(*ListWorkerBlocksOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListWorkerBlocksInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListWorkerBlocksRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) return req, nil }, } for p.Next() { if !fn(p.Page().(*ListWorkerBlocksOutput), !p.HasNextPage()) { break } } return p.Err() } const opListWorkersWithQualificationType = "ListWorkersWithQualificationType" // ListWorkersWithQualificationTypeRequest generates a "aws/request.Request" representing the // client's request for the ListWorkersWithQualificationType operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See ListWorkersWithQualificationType for more information on using the ListWorkersWithQualificationType // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the ListWorkersWithQualificationTypeRequest method. // req, resp := client.ListWorkersWithQualificationTypeRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListWorkersWithQualificationType func (c *MTurk) ListWorkersWithQualificationTypeRequest(input *ListWorkersWithQualificationTypeInput) (req *request.Request, output *ListWorkersWithQualificationTypeOutput) { op := &request.Operation{ Name: opListWorkersWithQualificationType, HTTPMethod: "POST", HTTPPath: "/", Paginator: &request.Paginator{ InputTokens: []string{"NextToken"}, OutputTokens: []string{"NextToken"}, LimitToken: "MaxResults", TruncationToken: "", }, } if input == nil { input = &ListWorkersWithQualificationTypeInput{} } output = &ListWorkersWithQualificationTypeOutput{} req = c.newRequest(op, input, output) return } // ListWorkersWithQualificationType API operation for Amazon Mechanical Turk. // // The ListWorkersWithQualificationType operation returns all of the Workers // that have been associated with a given Qualification type. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation ListWorkersWithQualificationType for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. 
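//
// As a rough sketch (the Qualification type ID is a placeholder; the Status,
// MaxResults, and Qualifications/WorkerId fields are taken from this package's
// input and output types):
//
//    out, err := client.ListWorkersWithQualificationType(&mturk.ListWorkersWithQualificationTypeInput{
//        QualificationTypeId: aws.String("qualification-type-id"),
//        Status:              aws.String("Granted"),
//        MaxResults:          aws.Int64(50),
//    })
//    if err != nil {
//        // handle the error
//    }
//    for _, q := range out.Qualifications {
//        fmt.Println(aws.StringValue(q.WorkerId))
//    }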
// // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListWorkersWithQualificationType func (c *MTurk) ListWorkersWithQualificationType(input *ListWorkersWithQualificationTypeInput) (*ListWorkersWithQualificationTypeOutput, error) { req, out := c.ListWorkersWithQualificationTypeRequest(input) return out, req.Send() } // ListWorkersWithQualificationTypeWithContext is the same as ListWorkersWithQualificationType with the addition of // the ability to pass a context and additional request options. // // See ListWorkersWithQualificationType for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListWorkersWithQualificationTypeWithContext(ctx aws.Context, input *ListWorkersWithQualificationTypeInput, opts ...request.Option) (*ListWorkersWithQualificationTypeOutput, error) { req, out := c.ListWorkersWithQualificationTypeRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } // ListWorkersWithQualificationTypePages iterates over the pages of a ListWorkersWithQualificationType operation, // calling the "fn" function with the response data for each page. To stop // iterating, return false from the fn function. // // See ListWorkersWithQualificationType method for more information on how to use this operation. // // Note: This operation can generate multiple requests to a service. // // // Example iterating over at most 3 pages of a ListWorkersWithQualificationType operation. // pageNum := 0 // err := client.ListWorkersWithQualificationTypePages(params, // func(page *mturk.ListWorkersWithQualificationTypeOutput, lastPage bool) bool { // pageNum++ // fmt.Println(page) // return pageNum <= 3 // }) // func (c *MTurk) ListWorkersWithQualificationTypePages(input *ListWorkersWithQualificationTypeInput, fn func(*ListWorkersWithQualificationTypeOutput, bool) bool) error { return c.ListWorkersWithQualificationTypePagesWithContext(aws.BackgroundContext(), input, fn) } // ListWorkersWithQualificationTypePagesWithContext same as ListWorkersWithQualificationTypePages except // it takes a Context and allows setting request options on the pages. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) ListWorkersWithQualificationTypePagesWithContext(ctx aws.Context, input *ListWorkersWithQualificationTypeInput, fn func(*ListWorkersWithQualificationTypeOutput, bool) bool, opts ...request.Option) error { p := request.Pagination{ NewRequest: func() (*request.Request, error) { var inCpy *ListWorkersWithQualificationTypeInput if input != nil { tmp := *input inCpy = &tmp } req, _ := c.ListWorkersWithQualificationTypeRequest(inCpy) req.SetContext(ctx) req.ApplyOptions(opts...) return req, nil }, } for p.Next() { if !fn(p.Page().(*ListWorkersWithQualificationTypeOutput), !p.HasNextPage()) { break } } return p.Err() } const opNotifyWorkers = "NotifyWorkers" // NotifyWorkersRequest generates a "aws/request.Request" representing the // client's request for the NotifyWorkers operation. 
The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See NotifyWorkers for more information on using the NotifyWorkers // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the NotifyWorkersRequest method. // req, resp := client.NotifyWorkersRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/NotifyWorkers func (c *MTurk) NotifyWorkersRequest(input *NotifyWorkersInput) (req *request.Request, output *NotifyWorkersOutput) { op := &request.Operation{ Name: opNotifyWorkers, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &NotifyWorkersInput{} } output = &NotifyWorkersOutput{} req = c.newRequest(op, input, output) return } // NotifyWorkers API operation for Amazon Mechanical Turk. // // The NotifyWorkers operation sends an email to one or more Workers that you // specify with the Worker ID. You can specify up to 100 Worker IDs to send // the same message with a single call to the NotifyWorkers operation. The NotifyWorkers // operation will send a notification email to a Worker only if you have previously // approved or rejected work from the Worker. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation NotifyWorkers for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/NotifyWorkers func (c *MTurk) NotifyWorkers(input *NotifyWorkersInput) (*NotifyWorkersOutput, error) { req, out := c.NotifyWorkersRequest(input) return out, req.Send() } // NotifyWorkersWithContext is the same as NotifyWorkers with the addition of // the ability to pass a context and additional request options. // // See NotifyWorkers for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) NotifyWorkersWithContext(ctx aws.Context, input *NotifyWorkersInput, opts ...request.Option) (*NotifyWorkersOutput, error) { req, out := c.NotifyWorkersRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opRejectAssignment = "RejectAssignment" // RejectAssignmentRequest generates a "aws/request.Request" representing the // client's request for the RejectAssignment operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. 
// the "output" return value is not valid until after Send returns without error. // // See RejectAssignment for more information on using the RejectAssignment // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the RejectAssignmentRequest method. // req, resp := client.RejectAssignmentRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/RejectAssignment func (c *MTurk) RejectAssignmentRequest(input *RejectAssignmentInput) (req *request.Request, output *RejectAssignmentOutput) { op := &request.Operation{ Name: opRejectAssignment, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &RejectAssignmentInput{} } output = &RejectAssignmentOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // RejectAssignment API operation for Amazon Mechanical Turk. // // The RejectAssignment operation rejects the results of a completed assignment. // // You can include an optional feedback message with the rejection, which the // Worker can see in the Status section of the web site. When you include a // feedback message with the rejection, it helps the Worker understand why the // assignment was rejected, and can improve the quality of the results the Worker // submits in the future. // // Only the Requester who created the HIT can reject an assignment for the HIT. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation RejectAssignment for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/RejectAssignment func (c *MTurk) RejectAssignment(input *RejectAssignmentInput) (*RejectAssignmentOutput, error) { req, out := c.RejectAssignmentRequest(input) return out, req.Send() } // RejectAssignmentWithContext is the same as RejectAssignment with the addition of // the ability to pass a context and additional request options. // // See RejectAssignment for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) RejectAssignmentWithContext(ctx aws.Context, input *RejectAssignmentInput, opts ...request.Option) (*RejectAssignmentOutput, error) { req, out := c.RejectAssignmentRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opRejectQualificationRequest = "RejectQualificationRequest" // RejectQualificationRequestRequest generates a "aws/request.Request" representing the // client's request for the RejectQualificationRequest operation. 
The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See RejectQualificationRequest for more information on using the RejectQualificationRequest // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the RejectQualificationRequestRequest method. // req, resp := client.RejectQualificationRequestRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/RejectQualificationRequest func (c *MTurk) RejectQualificationRequestRequest(input *RejectQualificationRequestInput) (req *request.Request, output *RejectQualificationRequestOutput) { op := &request.Operation{ Name: opRejectQualificationRequest, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &RejectQualificationRequestInput{} } output = &RejectQualificationRequestOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // RejectQualificationRequest API operation for Amazon Mechanical Turk. // // The RejectQualificationRequest operation rejects a user's request for a Qualification. // // You can provide a text message explaining why the request was rejected. The // Worker who made the request can see this message. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation RejectQualificationRequest for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/RejectQualificationRequest func (c *MTurk) RejectQualificationRequest(input *RejectQualificationRequestInput) (*RejectQualificationRequestOutput, error) { req, out := c.RejectQualificationRequestRequest(input) return out, req.Send() } // RejectQualificationRequestWithContext is the same as RejectQualificationRequest with the addition of // the ability to pass a context and additional request options. // // See RejectQualificationRequest for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) RejectQualificationRequestWithContext(ctx aws.Context, input *RejectQualificationRequestInput, opts ...request.Option) (*RejectQualificationRequestOutput, error) { req, out := c.RejectQualificationRequestRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) 
return out, req.Send() } const opSendBonus = "SendBonus" // SendBonusRequest generates a "aws/request.Request" representing the // client's request for the SendBonus operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See SendBonus for more information on using the SendBonus // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the SendBonusRequest method. // req, resp := client.SendBonusRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/SendBonus func (c *MTurk) SendBonusRequest(input *SendBonusInput) (req *request.Request, output *SendBonusOutput) { op := &request.Operation{ Name: opSendBonus, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &SendBonusInput{} } output = &SendBonusOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // SendBonus API operation for Amazon Mechanical Turk. // // The SendBonus operation issues a payment of money from your account to a // Worker. This payment happens separately from the reward you pay to the Worker // when you approve the Worker's assignment. The SendBonus operation requires // the Worker's ID and the assignment ID as parameters to initiate payment of // the bonus. You must include a message that explains the reason for the bonus // payment, as the Worker may not be expecting the payment. Amazon Mechanical // Turk collects a fee for bonus payments, similar to the HIT listing fee. This // operation fails if your account does not have enough funds to pay for both // the bonus and the fees. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation SendBonus for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/SendBonus func (c *MTurk) SendBonus(input *SendBonusInput) (*SendBonusOutput, error) { req, out := c.SendBonusRequest(input) return out, req.Send() } // SendBonusWithContext is the same as SendBonus with the addition of // the ability to pass a context and additional request options. // // See SendBonus for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. 
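//
// A minimal sketch only (ctx is an assumed non-nil context; the IDs, amount,
// and reason are placeholder values, with BonusAmount given as a string amount):
//
//    out, err := client.SendBonusWithContext(ctx, &mturk.SendBonusInput{
//        WorkerId:     aws.String("worker-id"),
//        AssignmentId: aws.String("assignment-id"),
//        BonusAmount:  aws.String("1.00"),
//        Reason:       aws.String("Thank you for the careful work on this HIT."),
//    })
//    if err != nil {
//        // handle the error
//    }
//    fmt.Println(out)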
func (c *MTurk) SendBonusWithContext(ctx aws.Context, input *SendBonusInput, opts ...request.Option) (*SendBonusOutput, error) { req, out := c.SendBonusRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opSendTestEventNotification = "SendTestEventNotification" // SendTestEventNotificationRequest generates a "aws/request.Request" representing the // client's request for the SendTestEventNotification operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See SendTestEventNotification for more information on using the SendTestEventNotification // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the SendTestEventNotificationRequest method. // req, resp := client.SendTestEventNotificationRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/SendTestEventNotification func (c *MTurk) SendTestEventNotificationRequest(input *SendTestEventNotificationInput) (req *request.Request, output *SendTestEventNotificationOutput) { op := &request.Operation{ Name: opSendTestEventNotification, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &SendTestEventNotificationInput{} } output = &SendTestEventNotificationOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // SendTestEventNotification API operation for Amazon Mechanical Turk. // // The SendTestEventNotification operation causes Amazon Mechanical Turk to // send a notification message as if a HIT event occurred, according to the // provided notification specification. This allows you to test notifications // without setting up notifications for a real HIT type and trying to trigger // them using the website. When you call this operation, the service attempts // to send the test notification immediately. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation SendTestEventNotification for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/SendTestEventNotification func (c *MTurk) SendTestEventNotification(input *SendTestEventNotificationInput) (*SendTestEventNotificationOutput, error) { req, out := c.SendTestEventNotificationRequest(input) return out, req.Send() } // SendTestEventNotificationWithContext is the same as SendTestEventNotification with the addition of // the ability to pass a context and additional request options. // // See SendTestEventNotification for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. 
If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) SendTestEventNotificationWithContext(ctx aws.Context, input *SendTestEventNotificationInput, opts ...request.Option) (*SendTestEventNotificationOutput, error) { req, out := c.SendTestEventNotificationRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opUpdateExpirationForHIT = "UpdateExpirationForHIT" // UpdateExpirationForHITRequest generates a "aws/request.Request" representing the // client's request for the UpdateExpirationForHIT operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See UpdateExpirationForHIT for more information on using the UpdateExpirationForHIT // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the UpdateExpirationForHITRequest method. // req, resp := client.UpdateExpirationForHITRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateExpirationForHIT func (c *MTurk) UpdateExpirationForHITRequest(input *UpdateExpirationForHITInput) (req *request.Request, output *UpdateExpirationForHITOutput) { op := &request.Operation{ Name: opUpdateExpirationForHIT, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &UpdateExpirationForHITInput{} } output = &UpdateExpirationForHITOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // UpdateExpirationForHIT API operation for Amazon Mechanical Turk. // // The UpdateExpirationForHIT operation allows you to update the expiration time // of a HIT. If you update it to a time in the past, the HIT will be immediately // expired. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation UpdateExpirationForHIT for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateExpirationForHIT func (c *MTurk) UpdateExpirationForHIT(input *UpdateExpirationForHITInput) (*UpdateExpirationForHITOutput, error) { req, out := c.UpdateExpirationForHITRequest(input) return out, req.Send() } // UpdateExpirationForHITWithContext is the same as UpdateExpirationForHIT with the addition of // the ability to pass a context and additional request options. // // See UpdateExpirationForHIT for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur.
In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) UpdateExpirationForHITWithContext(ctx aws.Context, input *UpdateExpirationForHITInput, opts ...request.Option) (*UpdateExpirationForHITOutput, error) { req, out := c.UpdateExpirationForHITRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opUpdateHITReviewStatus = "UpdateHITReviewStatus" // UpdateHITReviewStatusRequest generates a "aws/request.Request" representing the // client's request for the UpdateHITReviewStatus operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See UpdateHITReviewStatus for more information on using the UpdateHITReviewStatus // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the UpdateHITReviewStatusRequest method. // req, resp := client.UpdateHITReviewStatusRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateHITReviewStatus func (c *MTurk) UpdateHITReviewStatusRequest(input *UpdateHITReviewStatusInput) (req *request.Request, output *UpdateHITReviewStatusOutput) { op := &request.Operation{ Name: opUpdateHITReviewStatus, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &UpdateHITReviewStatusInput{} } output = &UpdateHITReviewStatusOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // UpdateHITReviewStatus API operation for Amazon Mechanical Turk. // // The UpdateHITReviewStatus operation updates the status of a HIT. If the status // is Reviewable, this operation can update the status to Reviewing, or it can // revert a Reviewing HIT back to the Reviewable status. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation UpdateHITReviewStatus for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateHITReviewStatus func (c *MTurk) UpdateHITReviewStatus(input *UpdateHITReviewStatusInput) (*UpdateHITReviewStatusOutput, error) { req, out := c.UpdateHITReviewStatusRequest(input) return out, req.Send() } // UpdateHITReviewStatusWithContext is the same as UpdateHITReviewStatus with the addition of // the ability to pass a context and additional request options. // // See UpdateHITReviewStatus for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. 
See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) UpdateHITReviewStatusWithContext(ctx aws.Context, input *UpdateHITReviewStatusInput, opts ...request.Option) (*UpdateHITReviewStatusOutput, error) { req, out := c.UpdateHITReviewStatusRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opUpdateHITTypeOfHIT = "UpdateHITTypeOfHIT" // UpdateHITTypeOfHITRequest generates a "aws/request.Request" representing the // client's request for the UpdateHITTypeOfHIT operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See UpdateHITTypeOfHIT for more information on using the UpdateHITTypeOfHIT // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the UpdateHITTypeOfHITRequest method. // req, resp := client.UpdateHITTypeOfHITRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateHITTypeOfHIT func (c *MTurk) UpdateHITTypeOfHITRequest(input *UpdateHITTypeOfHITInput) (req *request.Request, output *UpdateHITTypeOfHITOutput) { op := &request.Operation{ Name: opUpdateHITTypeOfHIT, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &UpdateHITTypeOfHITInput{} } output = &UpdateHITTypeOfHITOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // UpdateHITTypeOfHIT API operation for Amazon Mechanical Turk. // // The UpdateHITTypeOfHIT operation allows you to change the HITType properties // of a HIT. This operation disassociates the HIT from its old HITType properties // and associates it with the new HITType properties. The HIT takes on the properties // of the new HITType in place of the old ones. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation UpdateHITTypeOfHIT for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateHITTypeOfHIT func (c *MTurk) UpdateHITTypeOfHIT(input *UpdateHITTypeOfHITInput) (*UpdateHITTypeOfHITOutput, error) { req, out := c.UpdateHITTypeOfHITRequest(input) return out, req.Send() } // UpdateHITTypeOfHITWithContext is the same as UpdateHITTypeOfHIT with the addition of // the ability to pass a context and additional request options. // // See UpdateHITTypeOfHIT for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. 
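//
// An illustrative sketch (not generated documentation; client, ctx, and both
// IDs below are placeholders) of moving an existing HIT onto a different HIT
// type:
//
//    _, err := client.UpdateHITTypeOfHITWithContext(ctx, &UpdateHITTypeOfHITInput{
//        HITId:     aws.String("HIT_ID"),
//        HITTypeId: aws.String("NEW_HIT_TYPE_ID"),
//    })
//    if err != nil {
//        // the HIT keeps its previous HITType properties; inspect err
//    }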
func (c *MTurk) UpdateHITTypeOfHITWithContext(ctx aws.Context, input *UpdateHITTypeOfHITInput, opts ...request.Option) (*UpdateHITTypeOfHITOutput, error) { req, out := c.UpdateHITTypeOfHITRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opUpdateNotificationSettings = "UpdateNotificationSettings" // UpdateNotificationSettingsRequest generates a "aws/request.Request" representing the // client's request for the UpdateNotificationSettings operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See UpdateNotificationSettings for more information on using the UpdateNotificationSettings // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the UpdateNotificationSettingsRequest method. // req, resp := client.UpdateNotificationSettingsRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateNotificationSettings func (c *MTurk) UpdateNotificationSettingsRequest(input *UpdateNotificationSettingsInput) (req *request.Request, output *UpdateNotificationSettingsOutput) { op := &request.Operation{ Name: opUpdateNotificationSettings, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &UpdateNotificationSettingsInput{} } output = &UpdateNotificationSettingsOutput{} req = c.newRequest(op, input, output) req.Handlers.Unmarshal.Swap(jsonrpc.UnmarshalHandler.Name, protocol.UnmarshalDiscardBodyHandler) return } // UpdateNotificationSettings API operation for Amazon Mechanical Turk. // // The UpdateNotificationSettings operation creates, updates, disables or re-enables // notifications for a HIT type. If you call the UpdateNotificationSettings // operation for a HIT type that already has a notification specification, the // operation replaces the old specification with a new one. You can call the // UpdateNotificationSettings operation to enable or disable notifications for // the HIT type, without having to modify the notification specification itself // by providing updates to the Active status without specifying a new notification // specification. To change the Active status of a HIT type's notifications, // the HIT type must already have a notification specification, or one must // be provided in the same call to UpdateNotificationSettings. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation UpdateNotificationSettings for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. 
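//
// As an illustrative sketch (not generated documentation; the client variable
// and the HITTypeId value are placeholders), notifications for a HIT type that
// already has a specification can be disabled by updating only the Active
// status, without supplying a new notification specification:
//
//    _, err := client.UpdateNotificationSettings(&UpdateNotificationSettingsInput{
//        HITTypeId: aws.String("HIT_TYPE_ID"),
//        Active:    aws.Bool(false),
//    })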
// // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateNotificationSettings func (c *MTurk) UpdateNotificationSettings(input *UpdateNotificationSettingsInput) (*UpdateNotificationSettingsOutput, error) { req, out := c.UpdateNotificationSettingsRequest(input) return out, req.Send() } // UpdateNotificationSettingsWithContext is the same as UpdateNotificationSettings with the addition of // the ability to pass a context and additional request options. // // See UpdateNotificationSettings for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) UpdateNotificationSettingsWithContext(ctx aws.Context, input *UpdateNotificationSettingsInput, opts ...request.Option) (*UpdateNotificationSettingsOutput, error) { req, out := c.UpdateNotificationSettingsRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } const opUpdateQualificationType = "UpdateQualificationType" // UpdateQualificationTypeRequest generates a "aws/request.Request" representing the // client's request for the UpdateQualificationType operation. The "output" return // value will be populated with the request's response once the request completes // successfully. // // Use "Send" method on the returned Request to send the API call to the service. // the "output" return value is not valid until after Send returns without error. // // See UpdateQualificationType for more information on using the UpdateQualificationType // API call, and error handling. // // This method is useful when you want to inject custom logic or configuration // into the SDK's request lifecycle. Such as custom headers, or retry logic. // // // // Example sending a request using the UpdateQualificationTypeRequest method. // req, resp := client.UpdateQualificationTypeRequest(params) // // err := req.Send() // if err == nil { // resp is now filled // fmt.Println(resp) // } // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateQualificationType func (c *MTurk) UpdateQualificationTypeRequest(input *UpdateQualificationTypeInput) (req *request.Request, output *UpdateQualificationTypeOutput) { op := &request.Operation{ Name: opUpdateQualificationType, HTTPMethod: "POST", HTTPPath: "/", } if input == nil { input = &UpdateQualificationTypeInput{} } output = &UpdateQualificationTypeOutput{} req = c.newRequest(op, input, output) return } // UpdateQualificationType API operation for Amazon Mechanical Turk. // // The UpdateQualificationType operation modifies the attributes of an existing // Qualification type, which is represented by a QualificationType data structure. // Only the owner of a Qualification type can modify its attributes. // // Most attributes of a Qualification type can be changed after the type has // been created. However, the Name and Keywords fields cannot be modified. The // RetryDelayInSeconds parameter can be modified or added to change the delay // or to enable retries, but RetryDelayInSeconds cannot be used to disable retries. // // You can use this operation to update the test for a Qualification type. The // test is updated based on the values specified for the Test, TestDurationInSeconds // and AnswerKey parameters. All three parameters specify the updated test. 
// If you are updating the test for a type, you must specify the Test and TestDurationInSeconds // parameters. The AnswerKey parameter is optional; omitting it specifies that // the updated test does not have an answer key. // // If you omit the Test parameter, the test for the Qualification type is unchanged. // There is no way to remove a test from a Qualification type that has one. // If the type already has a test, you cannot update it to be AutoGranted. If // the Qualification type does not have a test and one is provided by an update, // the type will henceforth have a test. // // If you want to update the test duration or answer key for an existing test // without changing the questions, you must specify a Test parameter with the // original questions, along with the updated values. // // If you provide an updated Test but no AnswerKey, the new test will not have // an answer key. Requests for such Qualifications must be granted manually. // // You can also update the AutoGranted and AutoGrantedValue attributes of the // Qualification type. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about // the error. // // See the AWS API reference guide for Amazon Mechanical Turk's // API operation UpdateQualificationType for usage and error information. // // Returned Error Types: // * ServiceFault // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. // // * RequestError // Your request is invalid. // // See also, https://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/UpdateQualificationType func (c *MTurk) UpdateQualificationType(input *UpdateQualificationTypeInput) (*UpdateQualificationTypeOutput, error) { req, out := c.UpdateQualificationTypeRequest(input) return out, req.Send() } // UpdateQualificationTypeWithContext is the same as UpdateQualificationType with the addition of // the ability to pass a context and additional request options. // // See UpdateQualificationType for details on how to use this API operation. // // The context must be non-nil and will be used for request cancellation. If // the context is nil a panic will occur. In the future the SDK may create // sub-contexts for http.Requests. See https://golang.org/pkg/context/ // for more information on using Contexts. func (c *MTurk) UpdateQualificationTypeWithContext(ctx aws.Context, input *UpdateQualificationTypeInput, opts ...request.Option) (*UpdateQualificationTypeOutput, error) { req, out := c.UpdateQualificationTypeRequest(input) req.SetContext(ctx) req.ApplyOptions(opts...) return out, req.Send() } type AcceptQualificationRequestInput struct { _ struct{} `type:"structure"` // The value of the Qualification. You can omit this value if you are using // the presence or absence of the Qualification as the basis for a HIT requirement. IntegerValue *int64 `type:"integer"` // The ID of the Qualification request, as returned by the GetQualificationRequests // operation. // // QualificationRequestId is a required field QualificationRequestId *string `type:"string" required:"true"` } // String returns the string representation func (s AcceptQualificationRequestInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s AcceptQualificationRequestInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. 
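//
// A brief illustrative note (not generated documentation; the request ID is a
// placeholder): the SDK runs this validation as part of sending the request,
// but it can also be called directly to fail fast before any network activity:
//
//    input := &AcceptQualificationRequestInput{
//        QualificationRequestId: aws.String("QUALIFICATION_REQUEST_ID"),
//    }
//    if err := input.Validate(); err != nil {
//        // QualificationRequestId is required and was not set
//    }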
func (s *AcceptQualificationRequestInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "AcceptQualificationRequestInput"} if s.QualificationRequestId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationRequestId")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetIntegerValue sets the IntegerValue field's value. func (s *AcceptQualificationRequestInput) SetIntegerValue(v int64) *AcceptQualificationRequestInput { s.IntegerValue = &v return s } // SetQualificationRequestId sets the QualificationRequestId field's value. func (s *AcceptQualificationRequestInput) SetQualificationRequestId(v string) *AcceptQualificationRequestInput { s.QualificationRequestId = &v return s } type AcceptQualificationRequestOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s AcceptQualificationRequestOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s AcceptQualificationRequestOutput) GoString() string { return s.String() } type ApproveAssignmentInput struct { _ struct{} `type:"structure"` // The ID of the assignment. The assignment must correspond to a HIT created // by the Requester. // // AssignmentId is a required field AssignmentId *string `min:"1" type:"string" required:"true"` // A flag indicating that an assignment should be approved even if it was previously // rejected. Defaults to False. OverrideRejection *bool `type:"boolean"` // A message for the Worker, which the Worker can see in the Status section // of the web site. RequesterFeedback *string `type:"string"` } // String returns the string representation func (s ApproveAssignmentInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ApproveAssignmentInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ApproveAssignmentInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ApproveAssignmentInput"} if s.AssignmentId == nil { invalidParams.Add(request.NewErrParamRequired("AssignmentId")) } if s.AssignmentId != nil && len(*s.AssignmentId) < 1 { invalidParams.Add(request.NewErrParamMinLen("AssignmentId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentId sets the AssignmentId field's value. func (s *ApproveAssignmentInput) SetAssignmentId(v string) *ApproveAssignmentInput { s.AssignmentId = &v return s } // SetOverrideRejection sets the OverrideRejection field's value. func (s *ApproveAssignmentInput) SetOverrideRejection(v bool) *ApproveAssignmentInput { s.OverrideRejection = &v return s } // SetRequesterFeedback sets the RequesterFeedback field's value. func (s *ApproveAssignmentInput) SetRequesterFeedback(v string) *ApproveAssignmentInput { s.RequesterFeedback = &v return s } type ApproveAssignmentOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s ApproveAssignmentOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ApproveAssignmentOutput) GoString() string { return s.String() } // The Assignment data structure represents a single assignment of a HIT to // a Worker. The assignment tracks the Worker's efforts to complete the HIT, // and contains the results for later retrieval. type Assignment struct { _ struct{} `type:"structure"` // The date and time the Worker accepted the assignment. 
AcceptTime *time.Time `type:"timestamp"` // The Worker's answers submitted for the HIT contained in a QuestionFormAnswers // document, if the Worker provides an answer. If the Worker does not provide // any answers, Answer may contain a QuestionFormAnswers document, or Answer // may be empty. Answer *string `type:"string"` // If the Worker has submitted results and the Requester has approved the results, // ApprovalTime is the date and time the Requester approved the results. This // value is omitted from the assignment if the Requester has not yet approved // the results. ApprovalTime *time.Time `type:"timestamp"` // A unique identifier for the assignment. AssignmentId *string `min:"1" type:"string"` // The status of the assignment. AssignmentStatus *string `type:"string" enum:"AssignmentStatus"` // If results have been submitted, AutoApprovalTime is the date and time the // results of the assignment results are considered Approved automatically if // they have not already been explicitly approved or rejected by the Requester. // This value is derived from the auto-approval delay specified by the Requester // in the HIT. This value is omitted from the assignment if the Worker has not // yet submitted results. AutoApprovalTime *time.Time `type:"timestamp"` // The date and time of the deadline for the assignment. This value is derived // from the deadline specification for the HIT and the date and time the Worker // accepted the HIT. Deadline *time.Time `type:"timestamp"` // The ID of the HIT. HITId *string `min:"1" type:"string"` // If the Worker has submitted results and the Requester has rejected the results, // RejectionTime is the date and time the Requester rejected the results. RejectionTime *time.Time `type:"timestamp"` // The feedback string included with the call to the ApproveAssignment operation // or the RejectAssignment operation, if the Requester approved or rejected // the assignment and specified feedback. RequesterFeedback *string `type:"string"` // If the Worker has submitted results, SubmitTime is the date and time the // assignment was submitted. This value is omitted from the assignment if the // Worker has not yet submitted results. SubmitTime *time.Time `type:"timestamp"` // The ID of the Worker who accepted the HIT. WorkerId *string `min:"1" type:"string"` } // String returns the string representation func (s Assignment) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s Assignment) GoString() string { return s.String() } // SetAcceptTime sets the AcceptTime field's value. func (s *Assignment) SetAcceptTime(v time.Time) *Assignment { s.AcceptTime = &v return s } // SetAnswer sets the Answer field's value. func (s *Assignment) SetAnswer(v string) *Assignment { s.Answer = &v return s } // SetApprovalTime sets the ApprovalTime field's value. func (s *Assignment) SetApprovalTime(v time.Time) *Assignment { s.ApprovalTime = &v return s } // SetAssignmentId sets the AssignmentId field's value. func (s *Assignment) SetAssignmentId(v string) *Assignment { s.AssignmentId = &v return s } // SetAssignmentStatus sets the AssignmentStatus field's value. func (s *Assignment) SetAssignmentStatus(v string) *Assignment { s.AssignmentStatus = &v return s } // SetAutoApprovalTime sets the AutoApprovalTime field's value. func (s *Assignment) SetAutoApprovalTime(v time.Time) *Assignment { s.AutoApprovalTime = &v return s } // SetDeadline sets the Deadline field's value. 
func (s *Assignment) SetDeadline(v time.Time) *Assignment { s.Deadline = &v return s } // SetHITId sets the HITId field's value. func (s *Assignment) SetHITId(v string) *Assignment { s.HITId = &v return s } // SetRejectionTime sets the RejectionTime field's value. func (s *Assignment) SetRejectionTime(v time.Time) *Assignment { s.RejectionTime = &v return s } // SetRequesterFeedback sets the RequesterFeedback field's value. func (s *Assignment) SetRequesterFeedback(v string) *Assignment { s.RequesterFeedback = &v return s } // SetSubmitTime sets the SubmitTime field's value. func (s *Assignment) SetSubmitTime(v time.Time) *Assignment { s.SubmitTime = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *Assignment) SetWorkerId(v string) *Assignment { s.WorkerId = &v return s } type AssociateQualificationWithWorkerInput struct { _ struct{} `type:"structure"` // The value of the Qualification to assign. IntegerValue *int64 `type:"integer"` // The ID of the Qualification type to use for the assigned Qualification. // // QualificationTypeId is a required field QualificationTypeId *string `min:"1" type:"string" required:"true"` // Specifies whether to send a notification email message to the Worker saying // that the qualification was assigned to the Worker. Note: this is true by // default. SendNotification *bool `type:"boolean"` // The ID of the Worker to whom the Qualification is being assigned. Worker // IDs are included with submitted HIT assignments and Qualification requests. // // WorkerId is a required field WorkerId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s AssociateQualificationWithWorkerInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s AssociateQualificationWithWorkerInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *AssociateQualificationWithWorkerInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "AssociateQualificationWithWorkerInput"} if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if s.WorkerId == nil { invalidParams.Add(request.NewErrParamRequired("WorkerId")) } if s.WorkerId != nil && len(*s.WorkerId) < 1 { invalidParams.Add(request.NewErrParamMinLen("WorkerId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetIntegerValue sets the IntegerValue field's value. func (s *AssociateQualificationWithWorkerInput) SetIntegerValue(v int64) *AssociateQualificationWithWorkerInput { s.IntegerValue = &v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *AssociateQualificationWithWorkerInput) SetQualificationTypeId(v string) *AssociateQualificationWithWorkerInput { s.QualificationTypeId = &v return s } // SetSendNotification sets the SendNotification field's value. func (s *AssociateQualificationWithWorkerInput) SetSendNotification(v bool) *AssociateQualificationWithWorkerInput { s.SendNotification = &v return s } // SetWorkerId sets the WorkerId field's value. 
func (s *AssociateQualificationWithWorkerInput) SetWorkerId(v string) *AssociateQualificationWithWorkerInput { s.WorkerId = &v return s } type AssociateQualificationWithWorkerOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s AssociateQualificationWithWorkerOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s AssociateQualificationWithWorkerOutput) GoString() string { return s.String() } // An object representing a Bonus payment paid to a Worker. type BonusPayment struct { _ struct{} `type:"structure"` // The ID of the assignment associated with this bonus payment. AssignmentId *string `min:"1" type:"string"` // A string representing a currency amount. BonusAmount *string `type:"string"` // The date and time of when the bonus was granted. GrantTime *time.Time `type:"timestamp"` // The Reason text given when the bonus was granted, if any. Reason *string `type:"string"` // The ID of the Worker to whom the bonus was paid. WorkerId *string `min:"1" type:"string"` } // String returns the string representation func (s BonusPayment) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s BonusPayment) GoString() string { return s.String() } // SetAssignmentId sets the AssignmentId field's value. func (s *BonusPayment) SetAssignmentId(v string) *BonusPayment { s.AssignmentId = &v return s } // SetBonusAmount sets the BonusAmount field's value. func (s *BonusPayment) SetBonusAmount(v string) *BonusPayment { s.BonusAmount = &v return s } // SetGrantTime sets the GrantTime field's value. func (s *BonusPayment) SetGrantTime(v time.Time) *BonusPayment { s.GrantTime = &v return s } // SetReason sets the Reason field's value. func (s *BonusPayment) SetReason(v string) *BonusPayment { s.Reason = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *BonusPayment) SetWorkerId(v string) *BonusPayment { s.WorkerId = &v return s } type CreateAdditionalAssignmentsForHITInput struct { _ struct{} `type:"structure"` // The ID of the HIT to extend. // // HITId is a required field HITId *string `min:"1" type:"string" required:"true"` // The number of additional assignments to request for this HIT. // // NumberOfAdditionalAssignments is a required field NumberOfAdditionalAssignments *int64 `type:"integer" required:"true"` // A unique identifier for this request, which allows you to retry the call // on error without extending the HIT multiple times. This is useful in cases // such as network timeouts where it is unclear whether or not the call succeeded // on the server. If the extend HIT already exists in the system from a previous // call using the same UniqueRequestToken, subsequent calls will return an error // with a message containing the request ID. UniqueRequestToken *string `min:"1" type:"string"` } // String returns the string representation func (s CreateAdditionalAssignmentsForHITInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateAdditionalAssignmentsForHITInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. 
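//
// An illustrative sketch (not generated documentation; the ID and token are
// placeholders): HITId and NumberOfAdditionalAssignments are required, and the
// optional UniqueRequestToken can be reused on retries so the HIT is not
// extended twice, as described on the struct above:
//
//    input := &CreateAdditionalAssignmentsForHITInput{
//        HITId:                         aws.String("HIT_ID"),
//        NumberOfAdditionalAssignments: aws.Int64(5),
//        UniqueRequestToken:            aws.String("extend-HIT_ID-1"),
//    }
//    if err := input.Validate(); err != nil {
//        // fix the parameters before calling CreateAdditionalAssignmentsForHIT
//    }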
func (s *CreateAdditionalAssignmentsForHITInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "CreateAdditionalAssignmentsForHITInput"} if s.HITId == nil { invalidParams.Add(request.NewErrParamRequired("HITId")) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if s.NumberOfAdditionalAssignments == nil { invalidParams.Add(request.NewErrParamRequired("NumberOfAdditionalAssignments")) } if s.UniqueRequestToken != nil && len(*s.UniqueRequestToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("UniqueRequestToken", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetHITId sets the HITId field's value. func (s *CreateAdditionalAssignmentsForHITInput) SetHITId(v string) *CreateAdditionalAssignmentsForHITInput { s.HITId = &v return s } // SetNumberOfAdditionalAssignments sets the NumberOfAdditionalAssignments field's value. func (s *CreateAdditionalAssignmentsForHITInput) SetNumberOfAdditionalAssignments(v int64) *CreateAdditionalAssignmentsForHITInput { s.NumberOfAdditionalAssignments = &v return s } // SetUniqueRequestToken sets the UniqueRequestToken field's value. func (s *CreateAdditionalAssignmentsForHITInput) SetUniqueRequestToken(v string) *CreateAdditionalAssignmentsForHITInput { s.UniqueRequestToken = &v return s } type CreateAdditionalAssignmentsForHITOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s CreateAdditionalAssignmentsForHITOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateAdditionalAssignmentsForHITOutput) GoString() string { return s.String() } type CreateHITInput struct { _ struct{} `type:"structure"` // The amount of time, in seconds, that a Worker has to complete the HIT after // accepting it. If a Worker does not complete the assignment within the specified // duration, the assignment is considered abandoned. If the HIT is still active // (that is, its lifetime has not elapsed), the assignment becomes available // for other users to find and accept. // // AssignmentDurationInSeconds is a required field AssignmentDurationInSeconds *int64 `type:"long" required:"true"` // The Assignment-level Review Policy applies to the assignments under the HIT. // You can specify for Mechanical Turk to take various actions based on the // policy. AssignmentReviewPolicy *ReviewPolicy `type:"structure"` // The number of seconds after an assignment for the HIT has been submitted, // after which the assignment is considered Approved automatically unless the // Requester explicitly rejects it. AutoApprovalDelayInSeconds *int64 `type:"long"` // A general description of the HIT. A description includes detailed information // about the kind of task the HIT contains. On the Amazon Mechanical Turk web // site, the HIT description appears in the expanded view of search results, // and in the HIT and assignment screens. A good description gives the user // enough information to evaluate the HIT before accepting it. // // Description is a required field Description *string `type:"string" required:"true"` // The HITLayoutId allows you to use a pre-existing HIT design with placeholder // values and create an additional HIT by providing those values as HITLayoutParameters. // // Constraints: Either a Question parameter or a HITLayoutId parameter must // be provided. 
HITLayoutId *string `min:"1" type:"string"` // If the HITLayoutId is provided, any placeholder values must be filled in // with values using the HITLayoutParameter structure. For more information, // see HITLayout. HITLayoutParameters []*HITLayoutParameter `type:"list"` // The HIT-level Review Policy applies to the HIT. You can specify for Mechanical // Turk to take various actions based on the policy. HITReviewPolicy *ReviewPolicy `type:"structure"` // One or more words or phrases that describe the HIT, separated by commas. // These words are used in searches to find HITs. Keywords *string `type:"string"` // An amount of time, in seconds, after which the HIT is no longer available // for users to accept. After the lifetime of the HIT elapses, the HIT no longer // appears in HIT searches, even if not all of the assignments for the HIT have // been accepted. // // LifetimeInSeconds is a required field LifetimeInSeconds *int64 `type:"long" required:"true"` // The number of times the HIT can be accepted and completed before the HIT // becomes unavailable. MaxAssignments *int64 `type:"integer"` // Conditions that a Worker's Qualifications must meet in order to accept the // HIT. A HIT can have between zero and ten Qualification requirements. All // requirements must be met in order for a Worker to accept the HIT. Additionally, // other actions can be restricted using the ActionsGuarded field on each QualificationRequirement // structure. QualificationRequirements []*QualificationRequirement `type:"list"` // The data the person completing the HIT uses to produce the results. // // Constraints: Must be a QuestionForm data structure, an ExternalQuestion data // structure, or an HTMLQuestion data structure. The XML question data must // not be larger than 64 kilobytes (65,535 bytes) in size, including whitespace. // // Either a Question parameter or a HITLayoutId parameter must be provided. Question *string `type:"string"` // An arbitrary data field. The RequesterAnnotation parameter lets your application // attach arbitrary data to the HIT for tracking purposes. For example, this // parameter could be an identifier internal to the Requester's application // that corresponds with the HIT. // // The RequesterAnnotation parameter for a HIT is only visible to the Requester // who created the HIT. It is not shown to the Worker, or any other Requester. // // The RequesterAnnotation parameter may be different for each HIT you submit. // It does not affect how your HITs are grouped. RequesterAnnotation *string `type:"string"` // The amount of money the Requester will pay a Worker for successfully completing // the HIT. // // Reward is a required field Reward *string `type:"string" required:"true"` // The title of the HIT. A title should be short and descriptive about the kind // of task the HIT contains. On the Amazon Mechanical Turk web site, the HIT // title appears in search results, and everywhere the HIT is mentioned. // // Title is a required field Title *string `type:"string" required:"true"` // A unique identifier for this request which allows you to retry the call on // error without creating duplicate HITs. This is useful in cases such as network // timeouts where it is unclear whether or not the call succeeded on the server. // If the HIT already exists in the system from a previous call using the same // UniqueRequestToken, subsequent calls will return a AWS.MechanicalTurk.HitAlreadyExists // error with a message containing the HITId. 
// // Note: It is your responsibility to ensure uniqueness of the token. The unique // token expires after 24 hours. Subsequent calls using the same UniqueRequestToken // made after the 24 hour limit could create duplicate HITs. UniqueRequestToken *string `min:"1" type:"string"` } // String returns the string representation func (s CreateHITInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateHITInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *CreateHITInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "CreateHITInput"} if s.AssignmentDurationInSeconds == nil { invalidParams.Add(request.NewErrParamRequired("AssignmentDurationInSeconds")) } if s.Description == nil { invalidParams.Add(request.NewErrParamRequired("Description")) } if s.HITLayoutId != nil && len(*s.HITLayoutId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITLayoutId", 1)) } if s.LifetimeInSeconds == nil { invalidParams.Add(request.NewErrParamRequired("LifetimeInSeconds")) } if s.Reward == nil { invalidParams.Add(request.NewErrParamRequired("Reward")) } if s.Title == nil { invalidParams.Add(request.NewErrParamRequired("Title")) } if s.UniqueRequestToken != nil && len(*s.UniqueRequestToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("UniqueRequestToken", 1)) } if s.AssignmentReviewPolicy != nil { if err := s.AssignmentReviewPolicy.Validate(); err != nil { invalidParams.AddNested("AssignmentReviewPolicy", err.(request.ErrInvalidParams)) } } if s.HITLayoutParameters != nil { for i, v := range s.HITLayoutParameters { if v == nil { continue } if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "HITLayoutParameters", i), err.(request.ErrInvalidParams)) } } } if s.HITReviewPolicy != nil { if err := s.HITReviewPolicy.Validate(); err != nil { invalidParams.AddNested("HITReviewPolicy", err.(request.ErrInvalidParams)) } } if s.QualificationRequirements != nil { for i, v := range s.QualificationRequirements { if v == nil { continue } if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "QualificationRequirements", i), err.(request.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentDurationInSeconds sets the AssignmentDurationInSeconds field's value. func (s *CreateHITInput) SetAssignmentDurationInSeconds(v int64) *CreateHITInput { s.AssignmentDurationInSeconds = &v return s } // SetAssignmentReviewPolicy sets the AssignmentReviewPolicy field's value. func (s *CreateHITInput) SetAssignmentReviewPolicy(v *ReviewPolicy) *CreateHITInput { s.AssignmentReviewPolicy = v return s } // SetAutoApprovalDelayInSeconds sets the AutoApprovalDelayInSeconds field's value. func (s *CreateHITInput) SetAutoApprovalDelayInSeconds(v int64) *CreateHITInput { s.AutoApprovalDelayInSeconds = &v return s } // SetDescription sets the Description field's value. func (s *CreateHITInput) SetDescription(v string) *CreateHITInput { s.Description = &v return s } // SetHITLayoutId sets the HITLayoutId field's value. func (s *CreateHITInput) SetHITLayoutId(v string) *CreateHITInput { s.HITLayoutId = &v return s } // SetHITLayoutParameters sets the HITLayoutParameters field's value. func (s *CreateHITInput) SetHITLayoutParameters(v []*HITLayoutParameter) *CreateHITInput { s.HITLayoutParameters = v return s } // SetHITReviewPolicy sets the HITReviewPolicy field's value. 
func (s *CreateHITInput) SetHITReviewPolicy(v *ReviewPolicy) *CreateHITInput { s.HITReviewPolicy = v return s } // SetKeywords sets the Keywords field's value. func (s *CreateHITInput) SetKeywords(v string) *CreateHITInput { s.Keywords = &v return s } // SetLifetimeInSeconds sets the LifetimeInSeconds field's value. func (s *CreateHITInput) SetLifetimeInSeconds(v int64) *CreateHITInput { s.LifetimeInSeconds = &v return s } // SetMaxAssignments sets the MaxAssignments field's value. func (s *CreateHITInput) SetMaxAssignments(v int64) *CreateHITInput { s.MaxAssignments = &v return s } // SetQualificationRequirements sets the QualificationRequirements field's value. func (s *CreateHITInput) SetQualificationRequirements(v []*QualificationRequirement) *CreateHITInput { s.QualificationRequirements = v return s } // SetQuestion sets the Question field's value. func (s *CreateHITInput) SetQuestion(v string) *CreateHITInput { s.Question = &v return s } // SetRequesterAnnotation sets the RequesterAnnotation field's value. func (s *CreateHITInput) SetRequesterAnnotation(v string) *CreateHITInput { s.RequesterAnnotation = &v return s } // SetReward sets the Reward field's value. func (s *CreateHITInput) SetReward(v string) *CreateHITInput { s.Reward = &v return s } // SetTitle sets the Title field's value. func (s *CreateHITInput) SetTitle(v string) *CreateHITInput { s.Title = &v return s } // SetUniqueRequestToken sets the UniqueRequestToken field's value. func (s *CreateHITInput) SetUniqueRequestToken(v string) *CreateHITInput { s.UniqueRequestToken = &v return s } type CreateHITOutput struct { _ struct{} `type:"structure"` // Contains the newly created HIT data. For a description of the HIT data structure // as it appears in responses, see the HIT Data Structure documentation. HIT *HIT `type:"structure"` } // String returns the string representation func (s CreateHITOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateHITOutput) GoString() string { return s.String() } // SetHIT sets the HIT field's value. func (s *CreateHITOutput) SetHIT(v *HIT) *CreateHITOutput { s.HIT = v return s } type CreateHITTypeInput struct { _ struct{} `type:"structure"` // The amount of time, in seconds, that a Worker has to complete the HIT after // accepting it. If a Worker does not complete the assignment within the specified // duration, the assignment is considered abandoned. If the HIT is still active // (that is, its lifetime has not elapsed), the assignment becomes available // for other users to find and accept. // // AssignmentDurationInSeconds is a required field AssignmentDurationInSeconds *int64 `type:"long" required:"true"` // The number of seconds after an assignment for the HIT has been submitted, // after which the assignment is considered Approved automatically unless the // Requester explicitly rejects it. AutoApprovalDelayInSeconds *int64 `type:"long"` // A general description of the HIT. A description includes detailed information // about the kind of task the HIT contains. On the Amazon Mechanical Turk web // site, the HIT description appears in the expanded view of search results, // and in the HIT and assignment screens. A good description gives the user // enough information to evaluate the HIT before accepting it. // // Description is a required field Description *string `type:"string" required:"true"` // One or more words or phrases that describe the HIT, separated by commas. // These words are used in searches to find HITs. 
Keywords *string `type:"string"` // Conditions that a Worker's Qualifications must meet in order to accept the // HIT. A HIT can have between zero and ten Qualification requirements. All // requirements must be met in order for a Worker to accept the HIT. Additionally, // other actions can be restricted using the ActionsGuarded field on each QualificationRequirement // structure. QualificationRequirements []*QualificationRequirement `type:"list"` // The amount of money the Requester will pay a Worker for successfully completing // the HIT. // // Reward is a required field Reward *string `type:"string" required:"true"` // The title of the HIT. A title should be short and descriptive about the kind // of task the HIT contains. On the Amazon Mechanical Turk web site, the HIT // title appears in search results, and everywhere the HIT is mentioned. // // Title is a required field Title *string `type:"string" required:"true"` } // String returns the string representation func (s CreateHITTypeInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateHITTypeInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *CreateHITTypeInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "CreateHITTypeInput"} if s.AssignmentDurationInSeconds == nil { invalidParams.Add(request.NewErrParamRequired("AssignmentDurationInSeconds")) } if s.Description == nil { invalidParams.Add(request.NewErrParamRequired("Description")) } if s.Reward == nil { invalidParams.Add(request.NewErrParamRequired("Reward")) } if s.Title == nil { invalidParams.Add(request.NewErrParamRequired("Title")) } if s.QualificationRequirements != nil { for i, v := range s.QualificationRequirements { if v == nil { continue } if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "QualificationRequirements", i), err.(request.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentDurationInSeconds sets the AssignmentDurationInSeconds field's value. func (s *CreateHITTypeInput) SetAssignmentDurationInSeconds(v int64) *CreateHITTypeInput { s.AssignmentDurationInSeconds = &v return s } // SetAutoApprovalDelayInSeconds sets the AutoApprovalDelayInSeconds field's value. func (s *CreateHITTypeInput) SetAutoApprovalDelayInSeconds(v int64) *CreateHITTypeInput { s.AutoApprovalDelayInSeconds = &v return s } // SetDescription sets the Description field's value. func (s *CreateHITTypeInput) SetDescription(v string) *CreateHITTypeInput { s.Description = &v return s } // SetKeywords sets the Keywords field's value. func (s *CreateHITTypeInput) SetKeywords(v string) *CreateHITTypeInput { s.Keywords = &v return s } // SetQualificationRequirements sets the QualificationRequirements field's value. func (s *CreateHITTypeInput) SetQualificationRequirements(v []*QualificationRequirement) *CreateHITTypeInput { s.QualificationRequirements = v return s } // SetReward sets the Reward field's value. func (s *CreateHITTypeInput) SetReward(v string) *CreateHITTypeInput { s.Reward = &v return s } // SetTitle sets the Title field's value. func (s *CreateHITTypeInput) SetTitle(v string) *CreateHITTypeInput { s.Title = &v return s } type CreateHITTypeOutput struct { _ struct{} `type:"structure"` // The ID of the newly registered HIT type. 
HITTypeId *string `min:"1" type:"string"` } // String returns the string representation func (s CreateHITTypeOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateHITTypeOutput) GoString() string { return s.String() } // SetHITTypeId sets the HITTypeId field's value. func (s *CreateHITTypeOutput) SetHITTypeId(v string) *CreateHITTypeOutput { s.HITTypeId = &v return s } type CreateHITWithHITTypeInput struct { _ struct{} `type:"structure"` // The Assignment-level Review Policy applies to the assignments under the HIT. // You can specify for Mechanical Turk to take various actions based on the // policy. AssignmentReviewPolicy *ReviewPolicy `type:"structure"` // The HITLayoutId allows you to use a pre-existing HIT design with placeholder // values and create an additional HIT by providing those values as HITLayoutParameters. // // Constraints: Either a Question parameter or a HITLayoutId parameter must // be provided. HITLayoutId *string `min:"1" type:"string"` // If the HITLayoutId is provided, any placeholder values must be filled in // with values using the HITLayoutParameter structure. For more information, // see HITLayout. HITLayoutParameters []*HITLayoutParameter `type:"list"` // The HIT-level Review Policy applies to the HIT. You can specify for Mechanical // Turk to take various actions based on the policy. HITReviewPolicy *ReviewPolicy `type:"structure"` // The HIT type ID you want to create this HIT with. // // HITTypeId is a required field HITTypeId *string `min:"1" type:"string" required:"true"` // An amount of time, in seconds, after which the HIT is no longer available // for users to accept. After the lifetime of the HIT elapses, the HIT no longer // appears in HIT searches, even if not all of the assignments for the HIT have // been accepted. // // LifetimeInSeconds is a required field LifetimeInSeconds *int64 `type:"long" required:"true"` // The number of times the HIT can be accepted and completed before the HIT // becomes unavailable. MaxAssignments *int64 `type:"integer"` // The data the person completing the HIT uses to produce the results. // // Constraints: Must be a QuestionForm data structure, an ExternalQuestion data // structure, or an HTMLQuestion data structure. The XML question data must // not be larger than 64 kilobytes (65,535 bytes) in size, including whitespace. // // Either a Question parameter or a HITLayoutId parameter must be provided. Question *string `type:"string"` // An arbitrary data field. The RequesterAnnotation parameter lets your application // attach arbitrary data to the HIT for tracking purposes. For example, this // parameter could be an identifier internal to the Requester's application // that corresponds with the HIT. // // The RequesterAnnotation parameter for a HIT is only visible to the Requester // who created the HIT. It is not shown to the Worker, or any other Requester. // // The RequesterAnnotation parameter may be different for each HIT you submit. // It does not affect how your HITs are grouped. RequesterAnnotation *string `type:"string"` // A unique identifier for this request which allows you to retry the call on // error without creating duplicate HITs. This is useful in cases such as network // timeouts where it is unclear whether or not the call succeeded on the server. 
// If the HIT already exists in the system from a previous call using the same // UniqueRequestToken, subsequent calls will return a AWS.MechanicalTurk.HitAlreadyExists // error with a message containing the HITId. // // Note: It is your responsibility to ensure uniqueness of the token. The unique // token expires after 24 hours. Subsequent calls using the same UniqueRequestToken // made after the 24 hour limit could create duplicate HITs. UniqueRequestToken *string `min:"1" type:"string"` } // String returns the string representation func (s CreateHITWithHITTypeInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateHITWithHITTypeInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *CreateHITWithHITTypeInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "CreateHITWithHITTypeInput"} if s.HITLayoutId != nil && len(*s.HITLayoutId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITLayoutId", 1)) } if s.HITTypeId == nil { invalidParams.Add(request.NewErrParamRequired("HITTypeId")) } if s.HITTypeId != nil && len(*s.HITTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITTypeId", 1)) } if s.LifetimeInSeconds == nil { invalidParams.Add(request.NewErrParamRequired("LifetimeInSeconds")) } if s.UniqueRequestToken != nil && len(*s.UniqueRequestToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("UniqueRequestToken", 1)) } if s.AssignmentReviewPolicy != nil { if err := s.AssignmentReviewPolicy.Validate(); err != nil { invalidParams.AddNested("AssignmentReviewPolicy", err.(request.ErrInvalidParams)) } } if s.HITLayoutParameters != nil { for i, v := range s.HITLayoutParameters { if v == nil { continue } if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "HITLayoutParameters", i), err.(request.ErrInvalidParams)) } } } if s.HITReviewPolicy != nil { if err := s.HITReviewPolicy.Validate(); err != nil { invalidParams.AddNested("HITReviewPolicy", err.(request.ErrInvalidParams)) } } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentReviewPolicy sets the AssignmentReviewPolicy field's value. func (s *CreateHITWithHITTypeInput) SetAssignmentReviewPolicy(v *ReviewPolicy) *CreateHITWithHITTypeInput { s.AssignmentReviewPolicy = v return s } // SetHITLayoutId sets the HITLayoutId field's value. func (s *CreateHITWithHITTypeInput) SetHITLayoutId(v string) *CreateHITWithHITTypeInput { s.HITLayoutId = &v return s } // SetHITLayoutParameters sets the HITLayoutParameters field's value. func (s *CreateHITWithHITTypeInput) SetHITLayoutParameters(v []*HITLayoutParameter) *CreateHITWithHITTypeInput { s.HITLayoutParameters = v return s } // SetHITReviewPolicy sets the HITReviewPolicy field's value. func (s *CreateHITWithHITTypeInput) SetHITReviewPolicy(v *ReviewPolicy) *CreateHITWithHITTypeInput { s.HITReviewPolicy = v return s } // SetHITTypeId sets the HITTypeId field's value. func (s *CreateHITWithHITTypeInput) SetHITTypeId(v string) *CreateHITWithHITTypeInput { s.HITTypeId = &v return s } // SetLifetimeInSeconds sets the LifetimeInSeconds field's value. func (s *CreateHITWithHITTypeInput) SetLifetimeInSeconds(v int64) *CreateHITWithHITTypeInput { s.LifetimeInSeconds = &v return s } // SetMaxAssignments sets the MaxAssignments field's value. 
func (s *CreateHITWithHITTypeInput) SetMaxAssignments(v int64) *CreateHITWithHITTypeInput { s.MaxAssignments = &v return s } // SetQuestion sets the Question field's value. func (s *CreateHITWithHITTypeInput) SetQuestion(v string) *CreateHITWithHITTypeInput { s.Question = &v return s } // SetRequesterAnnotation sets the RequesterAnnotation field's value. func (s *CreateHITWithHITTypeInput) SetRequesterAnnotation(v string) *CreateHITWithHITTypeInput { s.RequesterAnnotation = &v return s } // SetUniqueRequestToken sets the UniqueRequestToken field's value. func (s *CreateHITWithHITTypeInput) SetUniqueRequestToken(v string) *CreateHITWithHITTypeInput { s.UniqueRequestToken = &v return s } type CreateHITWithHITTypeOutput struct { _ struct{} `type:"structure"` // Contains the newly created HIT data. For a description of the HIT data structure // as it appears in responses, see the HIT Data Structure documentation. HIT *HIT `type:"structure"` } // String returns the string representation func (s CreateHITWithHITTypeOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateHITWithHITTypeOutput) GoString() string { return s.String() } // SetHIT sets the HIT field's value. func (s *CreateHITWithHITTypeOutput) SetHIT(v *HIT) *CreateHITWithHITTypeOutput { s.HIT = v return s } type CreateQualificationTypeInput struct { _ struct{} `type:"structure"` // The answers to the Qualification test specified in the Test parameter, in // the form of an AnswerKey data structure. // // Constraints: Must not be longer than 65535 bytes. // // Constraints: None. If not specified, you must process Qualification requests // manually. AnswerKey *string `type:"string"` // Specifies whether requests for the Qualification type are granted immediately, // without prompting the Worker with a Qualification test. // // Constraints: If the Test parameter is specified, this parameter cannot be // true. AutoGranted *bool `type:"boolean"` // The Qualification value to use for automatically granted Qualifications. // This parameter is used only if the AutoGranted parameter is true. AutoGrantedValue *int64 `type:"integer"` // A long description for the Qualification type. On the Amazon Mechanical Turk // website, the long description is displayed when a Worker examines a Qualification // type. // // Description is a required field Description *string `type:"string" required:"true"` // One or more words or phrases that describe the Qualification type, separated // by commas. The keywords of a type make the type easier to find during a search. Keywords *string `type:"string"` // The name you give to the Qualification type. The type name is used to represent // the Qualification to Workers, and to find the type using a Qualification // type search. It must be unique across all of your Qualification types. // // Name is a required field Name *string `type:"string" required:"true"` // The initial status of the Qualification type. // // Constraints: Valid values are: Active | Inactive // // QualificationTypeStatus is a required field QualificationTypeStatus *string `type:"string" required:"true" enum:"QualificationTypeStatus"` // The number of seconds that a Worker must wait after requesting a Qualification // of the Qualification type before the worker can retry the Qualification request. // // Constraints: None. If not specified, retries are disabled and Workers can // request a Qualification of this type only once, even if the Worker has not // been granted the Qualification. 
It is not possible to disable retries for // a Qualification type after it has been created with retries enabled. If you // want to disable retries, you must delete existing retry-enabled Qualification // type and then create a new Qualification type with retries disabled. RetryDelayInSeconds *int64 `type:"long"` // The questions for the Qualification test a Worker must answer correctly to // obtain a Qualification of this type. If this parameter is specified, TestDurationInSeconds // must also be specified. // // Constraints: Must not be longer than 65535 bytes. Must be a QuestionForm // data structure. This parameter cannot be specified if AutoGranted is true. // // Constraints: None. If not specified, the Worker may request the Qualification // without answering any questions. Test *string `type:"string"` // The number of seconds the Worker has to complete the Qualification test, // starting from the time the Worker requests the Qualification. TestDurationInSeconds *int64 `type:"long"` } // String returns the string representation func (s CreateQualificationTypeInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateQualificationTypeInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *CreateQualificationTypeInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "CreateQualificationTypeInput"} if s.Description == nil { invalidParams.Add(request.NewErrParamRequired("Description")) } if s.Name == nil { invalidParams.Add(request.NewErrParamRequired("Name")) } if s.QualificationTypeStatus == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeStatus")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAnswerKey sets the AnswerKey field's value. func (s *CreateQualificationTypeInput) SetAnswerKey(v string) *CreateQualificationTypeInput { s.AnswerKey = &v return s } // SetAutoGranted sets the AutoGranted field's value. func (s *CreateQualificationTypeInput) SetAutoGranted(v bool) *CreateQualificationTypeInput { s.AutoGranted = &v return s } // SetAutoGrantedValue sets the AutoGrantedValue field's value. func (s *CreateQualificationTypeInput) SetAutoGrantedValue(v int64) *CreateQualificationTypeInput { s.AutoGrantedValue = &v return s } // SetDescription sets the Description field's value. func (s *CreateQualificationTypeInput) SetDescription(v string) *CreateQualificationTypeInput { s.Description = &v return s } // SetKeywords sets the Keywords field's value. func (s *CreateQualificationTypeInput) SetKeywords(v string) *CreateQualificationTypeInput { s.Keywords = &v return s } // SetName sets the Name field's value. func (s *CreateQualificationTypeInput) SetName(v string) *CreateQualificationTypeInput { s.Name = &v return s } // SetQualificationTypeStatus sets the QualificationTypeStatus field's value. func (s *CreateQualificationTypeInput) SetQualificationTypeStatus(v string) *CreateQualificationTypeInput { s.QualificationTypeStatus = &v return s } // SetRetryDelayInSeconds sets the RetryDelayInSeconds field's value. func (s *CreateQualificationTypeInput) SetRetryDelayInSeconds(v int64) *CreateQualificationTypeInput { s.RetryDelayInSeconds = &v return s } // SetTest sets the Test field's value. func (s *CreateQualificationTypeInput) SetTest(v string) *CreateQualificationTypeInput { s.Test = &v return s } // SetTestDurationInSeconds sets the TestDurationInSeconds field's value. 
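//
// The setters on CreateQualificationTypeInput return the receiver and can be
// chained. An illustrative sketch of a Qualification type that requires a
// test, where testXML and answerKeyXML are placeholders for caller-supplied
// QuestionForm and AnswerKey documents:
//
//	input := (&CreateQualificationTypeInput{}).
//		SetName("Sample qualification").
//		SetDescription("Grants access to sample HITs").
//		SetQualificationTypeStatus("Active").
//		SetTest(testXML).
//		SetAnswerKey(answerKeyXML).
//		SetTestDurationInSeconds(300).
//		SetRetryDelayInSeconds(86400)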
func (s *CreateQualificationTypeInput) SetTestDurationInSeconds(v int64) *CreateQualificationTypeInput { s.TestDurationInSeconds = &v return s } type CreateQualificationTypeOutput struct { _ struct{} `type:"structure"` // The created Qualification type, returned as a QualificationType data structure. QualificationType *QualificationType `type:"structure"` } // String returns the string representation func (s CreateQualificationTypeOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateQualificationTypeOutput) GoString() string { return s.String() } // SetQualificationType sets the QualificationType field's value. func (s *CreateQualificationTypeOutput) SetQualificationType(v *QualificationType) *CreateQualificationTypeOutput { s.QualificationType = v return s } type CreateWorkerBlockInput struct { _ struct{} `type:"structure"` // A message explaining the reason for blocking the Worker. This parameter enables // you to keep track of your Workers. The Worker does not see this message. // // Reason is a required field Reason *string `type:"string" required:"true"` // The ID of the Worker to block. // // WorkerId is a required field WorkerId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s CreateWorkerBlockInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateWorkerBlockInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *CreateWorkerBlockInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "CreateWorkerBlockInput"} if s.Reason == nil { invalidParams.Add(request.NewErrParamRequired("Reason")) } if s.WorkerId == nil { invalidParams.Add(request.NewErrParamRequired("WorkerId")) } if s.WorkerId != nil && len(*s.WorkerId) < 1 { invalidParams.Add(request.NewErrParamMinLen("WorkerId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetReason sets the Reason field's value. func (s *CreateWorkerBlockInput) SetReason(v string) *CreateWorkerBlockInput { s.Reason = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *CreateWorkerBlockInput) SetWorkerId(v string) *CreateWorkerBlockInput { s.WorkerId = &v return s } type CreateWorkerBlockOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s CreateWorkerBlockOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s CreateWorkerBlockOutput) GoString() string { return s.String() } type DeleteHITInput struct { _ struct{} `type:"structure"` // The ID of the HIT to be deleted. // // HITId is a required field HITId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s DeleteHITInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s DeleteHITInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *DeleteHITInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "DeleteHITInput"} if s.HITId == nil { invalidParams.Add(request.NewErrParamRequired("HITId")) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetHITId sets the HITId field's value. 
func (s *DeleteHITInput) SetHITId(v string) *DeleteHITInput { s.HITId = &v return s } type DeleteHITOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s DeleteHITOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s DeleteHITOutput) GoString() string { return s.String() } type DeleteQualificationTypeInput struct { _ struct{} `type:"structure"` // The ID of the QualificationType to dispose. // // QualificationTypeId is a required field QualificationTypeId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s DeleteQualificationTypeInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s DeleteQualificationTypeInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *DeleteQualificationTypeInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "DeleteQualificationTypeInput"} if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *DeleteQualificationTypeInput) SetQualificationTypeId(v string) *DeleteQualificationTypeInput { s.QualificationTypeId = &v return s } type DeleteQualificationTypeOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s DeleteQualificationTypeOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s DeleteQualificationTypeOutput) GoString() string { return s.String() } type DeleteWorkerBlockInput struct { _ struct{} `type:"structure"` // A message that explains the reason for unblocking the Worker. The Worker // does not see this message. Reason *string `type:"string"` // The ID of the Worker to unblock. // // WorkerId is a required field WorkerId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s DeleteWorkerBlockInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s DeleteWorkerBlockInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *DeleteWorkerBlockInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "DeleteWorkerBlockInput"} if s.WorkerId == nil { invalidParams.Add(request.NewErrParamRequired("WorkerId")) } if s.WorkerId != nil && len(*s.WorkerId) < 1 { invalidParams.Add(request.NewErrParamMinLen("WorkerId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetReason sets the Reason field's value. func (s *DeleteWorkerBlockInput) SetReason(v string) *DeleteWorkerBlockInput { s.Reason = &v return s } // SetWorkerId sets the WorkerId field's value. 
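//
// WorkerId is the only required field on DeleteWorkerBlockInput; Reason is
// optional. An illustrative sketch ("WORKERID" is a placeholder value):
//
//	input := (&DeleteWorkerBlockInput{}).
//		SetWorkerId("WORKERID").
//		SetReason("Block no longer required")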
func (s *DeleteWorkerBlockInput) SetWorkerId(v string) *DeleteWorkerBlockInput { s.WorkerId = &v return s } type DeleteWorkerBlockOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s DeleteWorkerBlockOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s DeleteWorkerBlockOutput) GoString() string { return s.String() } type DisassociateQualificationFromWorkerInput struct { _ struct{} `type:"structure"` // The ID of the Qualification type of the Qualification to be revoked. // // QualificationTypeId is a required field QualificationTypeId *string `min:"1" type:"string" required:"true"` // A text message that explains why the Qualification was revoked. The user // who had the Qualification sees this message. Reason *string `type:"string"` // The ID of the Worker who possesses the Qualification to be revoked. // // WorkerId is a required field WorkerId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s DisassociateQualificationFromWorkerInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s DisassociateQualificationFromWorkerInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *DisassociateQualificationFromWorkerInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "DisassociateQualificationFromWorkerInput"} if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if s.WorkerId == nil { invalidParams.Add(request.NewErrParamRequired("WorkerId")) } if s.WorkerId != nil && len(*s.WorkerId) < 1 { invalidParams.Add(request.NewErrParamMinLen("WorkerId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *DisassociateQualificationFromWorkerInput) SetQualificationTypeId(v string) *DisassociateQualificationFromWorkerInput { s.QualificationTypeId = &v return s } // SetReason sets the Reason field's value. func (s *DisassociateQualificationFromWorkerInput) SetReason(v string) *DisassociateQualificationFromWorkerInput { s.Reason = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *DisassociateQualificationFromWorkerInput) SetWorkerId(v string) *DisassociateQualificationFromWorkerInput { s.WorkerId = &v return s } type DisassociateQualificationFromWorkerOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s DisassociateQualificationFromWorkerOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s DisassociateQualificationFromWorkerOutput) GoString() string { return s.String() } type GetAccountBalanceInput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s GetAccountBalanceInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetAccountBalanceInput) GoString() string { return s.String() } type GetAccountBalanceOutput struct { _ struct{} `type:"structure"` // A string representing a currency amount. AvailableBalance *string `type:"string"` // A string representing a currency amount. 
OnHoldBalance *string `type:"string"` } // String returns the string representation func (s GetAccountBalanceOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetAccountBalanceOutput) GoString() string { return s.String() } // SetAvailableBalance sets the AvailableBalance field's value. func (s *GetAccountBalanceOutput) SetAvailableBalance(v string) *GetAccountBalanceOutput { s.AvailableBalance = &v return s } // SetOnHoldBalance sets the OnHoldBalance field's value. func (s *GetAccountBalanceOutput) SetOnHoldBalance(v string) *GetAccountBalanceOutput { s.OnHoldBalance = &v return s } type GetAssignmentInput struct { _ struct{} `type:"structure"` // The ID of the Assignment to be retrieved. // // AssignmentId is a required field AssignmentId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s GetAssignmentInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetAssignmentInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *GetAssignmentInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "GetAssignmentInput"} if s.AssignmentId == nil { invalidParams.Add(request.NewErrParamRequired("AssignmentId")) } if s.AssignmentId != nil && len(*s.AssignmentId) < 1 { invalidParams.Add(request.NewErrParamMinLen("AssignmentId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentId sets the AssignmentId field's value. func (s *GetAssignmentInput) SetAssignmentId(v string) *GetAssignmentInput { s.AssignmentId = &v return s } type GetAssignmentOutput struct { _ struct{} `type:"structure"` // The assignment. The response includes one Assignment element. Assignment *Assignment `type:"structure"` // The HIT associated with this assignment. The response includes one HIT element. HIT *HIT `type:"structure"` } // String returns the string representation func (s GetAssignmentOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetAssignmentOutput) GoString() string { return s.String() } // SetAssignment sets the Assignment field's value. func (s *GetAssignmentOutput) SetAssignment(v *Assignment) *GetAssignmentOutput { s.Assignment = v return s } // SetHIT sets the HIT field's value. func (s *GetAssignmentOutput) SetHIT(v *HIT) *GetAssignmentOutput { s.HIT = v return s } type GetFileUploadURLInput struct { _ struct{} `type:"structure"` // The ID of the assignment that contains the question with a FileUploadAnswer. // // AssignmentId is a required field AssignmentId *string `min:"1" type:"string" required:"true"` // The identifier of the question with a FileUploadAnswer, as specified in the // QuestionForm of the HIT. // // QuestionIdentifier is a required field QuestionIdentifier *string `type:"string" required:"true"` } // String returns the string representation func (s GetFileUploadURLInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetFileUploadURLInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. 
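//
// Validate performs only the client-side checks shown below (required fields
// and minimum lengths); it does not contact the service. The SDK normally runs
// this validation before sending the request, but it can also be called
// directly. An illustrative sketch, where assignmentID is a placeholder:
//
//	input := (&GetFileUploadURLInput{}).
//		SetAssignmentId(assignmentID).
//		SetQuestionIdentifier("myFileUploadAnswer")
//	if err := input.Validate(); err != nil {
//		// err describes the invalid parameters (request.ErrInvalidParams)
//	}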
func (s *GetFileUploadURLInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "GetFileUploadURLInput"} if s.AssignmentId == nil { invalidParams.Add(request.NewErrParamRequired("AssignmentId")) } if s.AssignmentId != nil && len(*s.AssignmentId) < 1 { invalidParams.Add(request.NewErrParamMinLen("AssignmentId", 1)) } if s.QuestionIdentifier == nil { invalidParams.Add(request.NewErrParamRequired("QuestionIdentifier")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentId sets the AssignmentId field's value. func (s *GetFileUploadURLInput) SetAssignmentId(v string) *GetFileUploadURLInput { s.AssignmentId = &v return s } // SetQuestionIdentifier sets the QuestionIdentifier field's value. func (s *GetFileUploadURLInput) SetQuestionIdentifier(v string) *GetFileUploadURLInput { s.QuestionIdentifier = &v return s } type GetFileUploadURLOutput struct { _ struct{} `type:"structure"` // A temporary URL for the file that the Worker uploaded for the answer. FileUploadURL *string `type:"string"` } // String returns the string representation func (s GetFileUploadURLOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetFileUploadURLOutput) GoString() string { return s.String() } // SetFileUploadURL sets the FileUploadURL field's value. func (s *GetFileUploadURLOutput) SetFileUploadURL(v string) *GetFileUploadURLOutput { s.FileUploadURL = &v return s } type GetHITInput struct { _ struct{} `type:"structure"` // The ID of the HIT to be retrieved. // // HITId is a required field HITId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s GetHITInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetHITInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *GetHITInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "GetHITInput"} if s.HITId == nil { invalidParams.Add(request.NewErrParamRequired("HITId")) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetHITId sets the HITId field's value. func (s *GetHITInput) SetHITId(v string) *GetHITInput { s.HITId = &v return s } type GetHITOutput struct { _ struct{} `type:"structure"` // Contains the requested HIT data. HIT *HIT `type:"structure"` } // String returns the string representation func (s GetHITOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetHITOutput) GoString() string { return s.String() } // SetHIT sets the HIT field's value. func (s *GetHITOutput) SetHIT(v *HIT) *GetHITOutput { s.HIT = v return s } type GetQualificationScoreInput struct { _ struct{} `type:"structure"` // The ID of the QualificationType. // // QualificationTypeId is a required field QualificationTypeId *string `min:"1" type:"string" required:"true"` // The ID of the Worker whose Qualification is being updated. 
// // WorkerId is a required field WorkerId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s GetQualificationScoreInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetQualificationScoreInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *GetQualificationScoreInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "GetQualificationScoreInput"} if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if s.WorkerId == nil { invalidParams.Add(request.NewErrParamRequired("WorkerId")) } if s.WorkerId != nil && len(*s.WorkerId) < 1 { invalidParams.Add(request.NewErrParamMinLen("WorkerId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *GetQualificationScoreInput) SetQualificationTypeId(v string) *GetQualificationScoreInput { s.QualificationTypeId = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *GetQualificationScoreInput) SetWorkerId(v string) *GetQualificationScoreInput { s.WorkerId = &v return s } type GetQualificationScoreOutput struct { _ struct{} `type:"structure"` // The Qualification data structure of the Qualification assigned to a user, // including the Qualification type and the value (score). Qualification *Qualification `type:"structure"` } // String returns the string representation func (s GetQualificationScoreOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetQualificationScoreOutput) GoString() string { return s.String() } // SetQualification sets the Qualification field's value. func (s *GetQualificationScoreOutput) SetQualification(v *Qualification) *GetQualificationScoreOutput { s.Qualification = v return s } type GetQualificationTypeInput struct { _ struct{} `type:"structure"` // The ID of the QualificationType. // // QualificationTypeId is a required field QualificationTypeId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s GetQualificationTypeInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetQualificationTypeInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *GetQualificationTypeInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "GetQualificationTypeInput"} if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetQualificationTypeId sets the QualificationTypeId field's value. 
func (s *GetQualificationTypeInput) SetQualificationTypeId(v string) *GetQualificationTypeInput { s.QualificationTypeId = &v return s } type GetQualificationTypeOutput struct { _ struct{} `type:"structure"` // The returned Qualification Type QualificationType *QualificationType `type:"structure"` } // String returns the string representation func (s GetQualificationTypeOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s GetQualificationTypeOutput) GoString() string { return s.String() } // SetQualificationType sets the QualificationType field's value. func (s *GetQualificationTypeOutput) SetQualificationType(v *QualificationType) *GetQualificationTypeOutput { s.QualificationType = v return s } // The HIT data structure represents a single HIT, including all the information // necessary for a Worker to accept and complete the HIT. type HIT struct { _ struct{} `type:"structure"` // The length of time, in seconds, that a Worker has to complete the HIT after // accepting it. AssignmentDurationInSeconds *int64 `type:"long"` // The amount of time, in seconds, after the Worker submits an assignment for // the HIT that the results are automatically approved by Amazon Mechanical // Turk. This is the amount of time the Requester has to reject an assignment // submitted by a Worker before the assignment is auto-approved and the Worker // is paid. AutoApprovalDelayInSeconds *int64 `type:"long"` // The date and time the HIT was created. CreationTime *time.Time `type:"timestamp"` // A general description of the HIT. Description *string `type:"string"` // The date and time the HIT expires. Expiration *time.Time `type:"timestamp"` // The ID of the HIT Group of this HIT. HITGroupId *string `min:"1" type:"string"` // A unique identifier for the HIT. HITId *string `min:"1" type:"string"` // The ID of the HIT Layout of this HIT. HITLayoutId *string `min:"1" type:"string"` // Indicates the review status of the HIT. Valid Values are NotReviewed | MarkedForReview // | ReviewedAppropriate | ReviewedInappropriate. HITReviewStatus *string `type:"string" enum:"HITReviewStatus"` // The status of the HIT and its assignments. Valid Values are Assignable | // Unassignable | Reviewable | Reviewing | Disposed. HITStatus *string `type:"string" enum:"HITStatus"` // The ID of the HIT type of this HIT HITTypeId *string `min:"1" type:"string"` // One or more words or phrases that describe the HIT, separated by commas. // Search terms similar to the keywords of a HIT are more likely to have the // HIT in the search results. Keywords *string `type:"string"` // The number of times the HIT can be accepted and completed before the HIT // becomes unavailable. MaxAssignments *int64 `type:"integer"` // The number of assignments for this HIT that are available for Workers to // accept. NumberOfAssignmentsAvailable *int64 `type:"integer"` // The number of assignments for this HIT that have been approved or rejected. NumberOfAssignmentsCompleted *int64 `type:"integer"` // The number of assignments for this HIT that are being previewed or have been // accepted by Workers, but have not yet been submitted, returned, or abandoned. NumberOfAssignmentsPending *int64 `type:"integer"` // Conditions that a Worker's Qualifications must meet in order to accept the // HIT. A HIT can have between zero and ten Qualification requirements. All // requirements must be met in order for a Worker to accept the HIT. 
Additionally, // other actions can be restricted using the ActionsGuarded field on each QualificationRequirement // structure. QualificationRequirements []*QualificationRequirement `type:"list"` // The data the Worker completing the HIT uses to produce the results. This is // either a QuestionForm, HTMLQuestion or an ExternalQuestion data structure. Question *string `type:"string"` // An arbitrary data field the Requester who created the HIT can use. This field // is visible only to the creator of the HIT. RequesterAnnotation *string `type:"string"` // A string representing a currency amount. Reward *string `type:"string"` // The title of the HIT. Title *string `type:"string"` } // String returns the string representation func (s HIT) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s HIT) GoString() string { return s.String() } // SetAssignmentDurationInSeconds sets the AssignmentDurationInSeconds field's value. func (s *HIT) SetAssignmentDurationInSeconds(v int64) *HIT { s.AssignmentDurationInSeconds = &v return s } // SetAutoApprovalDelayInSeconds sets the AutoApprovalDelayInSeconds field's value. func (s *HIT) SetAutoApprovalDelayInSeconds(v int64) *HIT { s.AutoApprovalDelayInSeconds = &v return s } // SetCreationTime sets the CreationTime field's value. func (s *HIT) SetCreationTime(v time.Time) *HIT { s.CreationTime = &v return s } // SetDescription sets the Description field's value. func (s *HIT) SetDescription(v string) *HIT { s.Description = &v return s } // SetExpiration sets the Expiration field's value. func (s *HIT) SetExpiration(v time.Time) *HIT { s.Expiration = &v return s } // SetHITGroupId sets the HITGroupId field's value. func (s *HIT) SetHITGroupId(v string) *HIT { s.HITGroupId = &v return s } // SetHITId sets the HITId field's value. func (s *HIT) SetHITId(v string) *HIT { s.HITId = &v return s } // SetHITLayoutId sets the HITLayoutId field's value. func (s *HIT) SetHITLayoutId(v string) *HIT { s.HITLayoutId = &v return s } // SetHITReviewStatus sets the HITReviewStatus field's value. func (s *HIT) SetHITReviewStatus(v string) *HIT { s.HITReviewStatus = &v return s } // SetHITStatus sets the HITStatus field's value. func (s *HIT) SetHITStatus(v string) *HIT { s.HITStatus = &v return s } // SetHITTypeId sets the HITTypeId field's value. func (s *HIT) SetHITTypeId(v string) *HIT { s.HITTypeId = &v return s } // SetKeywords sets the Keywords field's value. func (s *HIT) SetKeywords(v string) *HIT { s.Keywords = &v return s } // SetMaxAssignments sets the MaxAssignments field's value. func (s *HIT) SetMaxAssignments(v int64) *HIT { s.MaxAssignments = &v return s } // SetNumberOfAssignmentsAvailable sets the NumberOfAssignmentsAvailable field's value. func (s *HIT) SetNumberOfAssignmentsAvailable(v int64) *HIT { s.NumberOfAssignmentsAvailable = &v return s } // SetNumberOfAssignmentsCompleted sets the NumberOfAssignmentsCompleted field's value. func (s *HIT) SetNumberOfAssignmentsCompleted(v int64) *HIT { s.NumberOfAssignmentsCompleted = &v return s } // SetNumberOfAssignmentsPending sets the NumberOfAssignmentsPending field's value. func (s *HIT) SetNumberOfAssignmentsPending(v int64) *HIT { s.NumberOfAssignmentsPending = &v return s } // SetQualificationRequirements sets the QualificationRequirements field's value. func (s *HIT) SetQualificationRequirements(v []*QualificationRequirement) *HIT { s.QualificationRequirements = v return s } // SetQuestion sets the Question field's value. 
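//
// HIT is returned by the service, so its fields are pointers and may be nil.
// An illustrative sketch of reading them defensively:
//
//	if hit.HITStatus != nil && *hit.HITStatus == "Reviewable" {
//		fmt.Println(*hit.HITId) // HITId is also a pointer; assumed non-nil here
//	}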
func (s *HIT) SetQuestion(v string) *HIT { s.Question = &v return s } // SetRequesterAnnotation sets the RequesterAnnotation field's value. func (s *HIT) SetRequesterAnnotation(v string) *HIT { s.RequesterAnnotation = &v return s } // SetReward sets the Reward field's value. func (s *HIT) SetReward(v string) *HIT { s.Reward = &v return s } // SetTitle sets the Title field's value. func (s *HIT) SetTitle(v string) *HIT { s.Title = &v return s } // The HITLayoutParameter data structure defines parameter values used with // a HITLayout. A HITLayout is a reusable Amazon Mechanical Turk project template // used to provide Human Intelligence Task (HIT) question data for CreateHIT. type HITLayoutParameter struct { _ struct{} `type:"structure"` // The name of the parameter in the HITLayout. // // Name is a required field Name *string `type:"string" required:"true"` // The value substituted for the parameter referenced in the HITLayout. // // Value is a required field Value *string `type:"string" required:"true"` } // String returns the string representation func (s HITLayoutParameter) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s HITLayoutParameter) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *HITLayoutParameter) Validate() error { invalidParams := request.ErrInvalidParams{Context: "HITLayoutParameter"} if s.Name == nil { invalidParams.Add(request.NewErrParamRequired("Name")) } if s.Value == nil { invalidParams.Add(request.NewErrParamRequired("Value")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetName sets the Name field's value. func (s *HITLayoutParameter) SetName(v string) *HITLayoutParameter { s.Name = &v return s } // SetValue sets the Value field's value. func (s *HITLayoutParameter) SetValue(v string) *HITLayoutParameter { s.Value = &v return s } type ListAssignmentsForHITInput struct { _ struct{} `type:"structure"` // The status of the assignments to return: Submitted | Approved | Rejected AssignmentStatuses []*string `type:"list"` // The ID of the HIT. // // HITId is a required field HITId *string `min:"1" type:"string" required:"true"` MaxResults *int64 `min:"1" type:"integer"` // Pagination token NextToken *string `min:"1" type:"string"` } // String returns the string representation func (s ListAssignmentsForHITInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListAssignmentsForHITInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ListAssignmentsForHITInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListAssignmentsForHITInput"} if s.HITId == nil { invalidParams.Add(request.NewErrParamRequired("HITId")) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentStatuses sets the AssignmentStatuses field's value. func (s *ListAssignmentsForHITInput) SetAssignmentStatuses(v []*string) *ListAssignmentsForHITInput { s.AssignmentStatuses = v return s } // SetHITId sets the HITId field's value. 
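//
// An illustrative sketch that restricts the listing to submitted assignments;
// hitID is a placeholder, and AssignmentStatuses takes *string values:
//
//	submitted := "Submitted"
//	input := (&ListAssignmentsForHITInput{}).
//		SetHITId(hitID).
//		SetAssignmentStatuses([]*string{&submitted}).
//		SetMaxResults(10)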
func (s *ListAssignmentsForHITInput) SetHITId(v string) *ListAssignmentsForHITInput { s.HITId = &v return s } // SetMaxResults sets the MaxResults field's value. func (s *ListAssignmentsForHITInput) SetMaxResults(v int64) *ListAssignmentsForHITInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListAssignmentsForHITInput) SetNextToken(v string) *ListAssignmentsForHITInput { s.NextToken = &v return s } type ListAssignmentsForHITOutput struct { _ struct{} `type:"structure"` // The collection of Assignment data structures returned by this call. Assignments []*Assignment `type:"list"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of assignments on the page in the filtered results list, equivalent // to the number of assignments returned by this call. NumResults *int64 `type:"integer"` } // String returns the string representation func (s ListAssignmentsForHITOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListAssignmentsForHITOutput) GoString() string { return s.String() } // SetAssignments sets the Assignments field's value. func (s *ListAssignmentsForHITOutput) SetAssignments(v []*Assignment) *ListAssignmentsForHITOutput { s.Assignments = v return s } // SetNextToken sets the NextToken field's value. func (s *ListAssignmentsForHITOutput) SetNextToken(v string) *ListAssignmentsForHITOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListAssignmentsForHITOutput) SetNumResults(v int64) *ListAssignmentsForHITOutput { s.NumResults = &v return s } type ListBonusPaymentsInput struct { _ struct{} `type:"structure"` // The ID of the assignment associated with the bonus payments to retrieve. // If specified, only bonus payments for the given assignment are returned. // Either the HITId parameter or the AssignmentId parameter must be specified AssignmentId *string `min:"1" type:"string"` // The ID of the HIT associated with the bonus payments to retrieve. If not // specified, all bonus payments for all assignments for the given HIT are returned. // Either the HITId parameter or the AssignmentId parameter must be specified HITId *string `min:"1" type:"string"` MaxResults *int64 `min:"1" type:"integer"` // Pagination token NextToken *string `min:"1" type:"string"` } // String returns the string representation func (s ListBonusPaymentsInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListBonusPaymentsInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. 
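//
// Note that Validate only applies the client-side checks shown below (minimum
// lengths and values); the documented requirement that either the HITId or the
// AssignmentId parameter be specified is not checked here. For example, an
// empty input still passes Validate:
//
//	empty := &ListBonusPaymentsInput{}
//	_ = empty.Validate() // returns nil even though neither ID is set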
func (s *ListBonusPaymentsInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListBonusPaymentsInput"} if s.AssignmentId != nil && len(*s.AssignmentId) < 1 { invalidParams.Add(request.NewErrParamMinLen("AssignmentId", 1)) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentId sets the AssignmentId field's value. func (s *ListBonusPaymentsInput) SetAssignmentId(v string) *ListBonusPaymentsInput { s.AssignmentId = &v return s } // SetHITId sets the HITId field's value. func (s *ListBonusPaymentsInput) SetHITId(v string) *ListBonusPaymentsInput { s.HITId = &v return s } // SetMaxResults sets the MaxResults field's value. func (s *ListBonusPaymentsInput) SetMaxResults(v int64) *ListBonusPaymentsInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListBonusPaymentsInput) SetNextToken(v string) *ListBonusPaymentsInput { s.NextToken = &v return s } type ListBonusPaymentsOutput struct { _ struct{} `type:"structure"` // A successful request to the ListBonusPayments operation returns a list of // BonusPayment objects. BonusPayments []*BonusPayment `type:"list"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of bonus payments on this page in the filtered results list, equivalent // to the number of bonus payments being returned by this call. NumResults *int64 `type:"integer"` } // String returns the string representation func (s ListBonusPaymentsOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListBonusPaymentsOutput) GoString() string { return s.String() } // SetBonusPayments sets the BonusPayments field's value. func (s *ListBonusPaymentsOutput) SetBonusPayments(v []*BonusPayment) *ListBonusPaymentsOutput { s.BonusPayments = v return s } // SetNextToken sets the NextToken field's value. func (s *ListBonusPaymentsOutput) SetNextToken(v string) *ListBonusPaymentsOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListBonusPaymentsOutput) SetNumResults(v int64) *ListBonusPaymentsOutput { s.NumResults = &v return s } type ListHITsForQualificationTypeInput struct { _ struct{} `type:"structure"` // Limit the number of results returned. MaxResults *int64 `min:"1" type:"integer"` // Pagination Token NextToken *string `min:"1" type:"string"` // The ID of the Qualification type to use when querying HITs. // // QualificationTypeId is a required field QualificationTypeId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s ListHITsForQualificationTypeInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListHITsForQualificationTypeInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. 
func (s *ListHITsForQualificationTypeInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListHITsForQualificationTypeInput"} if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetMaxResults sets the MaxResults field's value. func (s *ListHITsForQualificationTypeInput) SetMaxResults(v int64) *ListHITsForQualificationTypeInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListHITsForQualificationTypeInput) SetNextToken(v string) *ListHITsForQualificationTypeInput { s.NextToken = &v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *ListHITsForQualificationTypeInput) SetQualificationTypeId(v string) *ListHITsForQualificationTypeInput { s.QualificationTypeId = &v return s } type ListHITsForQualificationTypeOutput struct { _ struct{} `type:"structure"` // The list of HIT elements returned by the query. HITs []*HIT `type:"list"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of HITs on this page in the filtered results list, equivalent // to the number of HITs being returned by this call. NumResults *int64 `type:"integer"` } // String returns the string representation func (s ListHITsForQualificationTypeOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListHITsForQualificationTypeOutput) GoString() string { return s.String() } // SetHITs sets the HITs field's value. func (s *ListHITsForQualificationTypeOutput) SetHITs(v []*HIT) *ListHITsForQualificationTypeOutput { s.HITs = v return s } // SetNextToken sets the NextToken field's value. func (s *ListHITsForQualificationTypeOutput) SetNextToken(v string) *ListHITsForQualificationTypeOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListHITsForQualificationTypeOutput) SetNumResults(v int64) *ListHITsForQualificationTypeOutput { s.NumResults = &v return s } type ListHITsInput struct { _ struct{} `type:"structure"` MaxResults *int64 `min:"1" type:"integer"` // Pagination token NextToken *string `min:"1" type:"string"` } // String returns the string representation func (s ListHITsInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListHITsInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. 
func (s *ListHITsInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListHITsInput"} if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetMaxResults sets the MaxResults field's value. func (s *ListHITsInput) SetMaxResults(v int64) *ListHITsInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListHITsInput) SetNextToken(v string) *ListHITsInput { s.NextToken = &v return s } type ListHITsOutput struct { _ struct{} `type:"structure"` // The list of HIT elements returned by the query. HITs []*HIT `type:"list"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of HITs on this page in the filtered results list, equivalent // to the number of HITs being returned by this call. NumResults *int64 `type:"integer"` } // String returns the string representation func (s ListHITsOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListHITsOutput) GoString() string { return s.String() } // SetHITs sets the HITs field's value. func (s *ListHITsOutput) SetHITs(v []*HIT) *ListHITsOutput { s.HITs = v return s } // SetNextToken sets the NextToken field's value. func (s *ListHITsOutput) SetNextToken(v string) *ListHITsOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListHITsOutput) SetNumResults(v int64) *ListHITsOutput { s.NumResults = &v return s } type ListQualificationRequestsInput struct { _ struct{} `type:"structure"` // The maximum number of results to return in a single call. MaxResults *int64 `min:"1" type:"integer"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The ID of the QualificationType. QualificationTypeId *string `min:"1" type:"string"` } // String returns the string representation func (s ListQualificationRequestsInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListQualificationRequestsInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ListQualificationRequestsInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListQualificationRequestsInput"} if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetMaxResults sets the MaxResults field's value. 
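//
// MaxResults and NextToken together drive pagination. An illustrative sketch
// of draining all pages, assuming a service client whose
// ListQualificationRequests operation is defined earlier in this file:
//
//	var token *string
//	for {
//		input := &ListQualificationRequestsInput{NextToken: token}
//		input.SetMaxResults(50)
//		out, err := client.ListQualificationRequests(input)
//		if err != nil {
//			break // handle the error as appropriate
//		}
//		// ... process out.QualificationRequests ...
//		if out.NextToken == nil {
//			break
//		}
//		token = out.NextToken
//	}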
func (s *ListQualificationRequestsInput) SetMaxResults(v int64) *ListQualificationRequestsInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListQualificationRequestsInput) SetNextToken(v string) *ListQualificationRequestsInput { s.NextToken = &v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *ListQualificationRequestsInput) SetQualificationTypeId(v string) *ListQualificationRequestsInput { s.QualificationTypeId = &v return s } type ListQualificationRequestsOutput struct { _ struct{} `type:"structure"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of Qualification requests on this page in the filtered results // list, equivalent to the number of Qualification requests being returned by // this call. NumResults *int64 `type:"integer"` // The Qualification request. The response includes one QualificationRequest // element for each Qualification request returned by the query. QualificationRequests []*QualificationRequest `type:"list"` } // String returns the string representation func (s ListQualificationRequestsOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListQualificationRequestsOutput) GoString() string { return s.String() } // SetNextToken sets the NextToken field's value. func (s *ListQualificationRequestsOutput) SetNextToken(v string) *ListQualificationRequestsOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListQualificationRequestsOutput) SetNumResults(v int64) *ListQualificationRequestsOutput { s.NumResults = &v return s } // SetQualificationRequests sets the QualificationRequests field's value. func (s *ListQualificationRequestsOutput) SetQualificationRequests(v []*QualificationRequest) *ListQualificationRequestsOutput { s.QualificationRequests = v return s } type ListQualificationTypesInput struct { _ struct{} `type:"structure"` // The maximum number of results to return in a single call. MaxResults *int64 `min:"1" type:"integer"` // Specifies that only Qualification types that the Requester created are returned. // If false, the operation returns all Qualification types. MustBeOwnedByCaller *bool `type:"boolean"` // Specifies that only Qualification types that a user can request through the // Amazon Mechanical Turk web site, such as by taking a Qualification test, // are returned as results of the search. Some Qualification types, such as // those assigned automatically by the system, cannot be requested directly // by users. If false, all Qualification types, including those managed by the // system, are considered. Valid values are True | False. // // MustBeRequestable is a required field MustBeRequestable *bool `type:"boolean" required:"true"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // A text query against all of the searchable attributes of Qualification types. 
Query *string `type:"string"` } // String returns the string representation func (s ListQualificationTypesInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListQualificationTypesInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ListQualificationTypesInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListQualificationTypesInput"} if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.MustBeRequestable == nil { invalidParams.Add(request.NewErrParamRequired("MustBeRequestable")) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetMaxResults sets the MaxResults field's value. func (s *ListQualificationTypesInput) SetMaxResults(v int64) *ListQualificationTypesInput { s.MaxResults = &v return s } // SetMustBeOwnedByCaller sets the MustBeOwnedByCaller field's value. func (s *ListQualificationTypesInput) SetMustBeOwnedByCaller(v bool) *ListQualificationTypesInput { s.MustBeOwnedByCaller = &v return s } // SetMustBeRequestable sets the MustBeRequestable field's value. func (s *ListQualificationTypesInput) SetMustBeRequestable(v bool) *ListQualificationTypesInput { s.MustBeRequestable = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListQualificationTypesInput) SetNextToken(v string) *ListQualificationTypesInput { s.NextToken = &v return s } // SetQuery sets the Query field's value. func (s *ListQualificationTypesInput) SetQuery(v string) *ListQualificationTypesInput { s.Query = &v return s } type ListQualificationTypesOutput struct { _ struct{} `type:"structure"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of Qualification types on this page in the filtered results list, // equivalent to the number of types this operation returns. NumResults *int64 `type:"integer"` // The list of QualificationType elements returned by the query. QualificationTypes []*QualificationType `type:"list"` } // String returns the string representation func (s ListQualificationTypesOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListQualificationTypesOutput) GoString() string { return s.String() } // SetNextToken sets the NextToken field's value. func (s *ListQualificationTypesOutput) SetNextToken(v string) *ListQualificationTypesOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListQualificationTypesOutput) SetNumResults(v int64) *ListQualificationTypesOutput { s.NumResults = &v return s } // SetQualificationTypes sets the QualificationTypes field's value. func (s *ListQualificationTypesOutput) SetQualificationTypes(v []*QualificationType) *ListQualificationTypesOutput { s.QualificationTypes = v return s } type ListReviewPolicyResultsForHITInput struct { _ struct{} `type:"structure"` // The unique identifier of the HIT to retrieve review results for. // // HITId is a required field HITId *string `min:"1" type:"string" required:"true"` // Limit the number of results returned. 
MaxResults *int64 `min:"1" type:"integer"` // Pagination token NextToken *string `min:"1" type:"string"` // The Policy Level(s) to retrieve review results for - HIT or Assignment. If // omitted, the default behavior is to retrieve all data for both policy levels. // For a list of all the described policies, see Review Policies. PolicyLevels []*string `type:"list"` // Specify if the operation should retrieve a list of the actions taken executing // the Review Policies and their outcomes. RetrieveActions *bool `type:"boolean"` // Specify if the operation should retrieve a list of the results computed by // the Review Policies. RetrieveResults *bool `type:"boolean"` } // String returns the string representation func (s ListReviewPolicyResultsForHITInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListReviewPolicyResultsForHITInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ListReviewPolicyResultsForHITInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListReviewPolicyResultsForHITInput"} if s.HITId == nil { invalidParams.Add(request.NewErrParamRequired("HITId")) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetHITId sets the HITId field's value. func (s *ListReviewPolicyResultsForHITInput) SetHITId(v string) *ListReviewPolicyResultsForHITInput { s.HITId = &v return s } // SetMaxResults sets the MaxResults field's value. func (s *ListReviewPolicyResultsForHITInput) SetMaxResults(v int64) *ListReviewPolicyResultsForHITInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListReviewPolicyResultsForHITInput) SetNextToken(v string) *ListReviewPolicyResultsForHITInput { s.NextToken = &v return s } // SetPolicyLevels sets the PolicyLevels field's value. func (s *ListReviewPolicyResultsForHITInput) SetPolicyLevels(v []*string) *ListReviewPolicyResultsForHITInput { s.PolicyLevels = v return s } // SetRetrieveActions sets the RetrieveActions field's value. func (s *ListReviewPolicyResultsForHITInput) SetRetrieveActions(v bool) *ListReviewPolicyResultsForHITInput { s.RetrieveActions = &v return s } // SetRetrieveResults sets the RetrieveResults field's value. func (s *ListReviewPolicyResultsForHITInput) SetRetrieveResults(v bool) *ListReviewPolicyResultsForHITInput { s.RetrieveResults = &v return s } type ListReviewPolicyResultsForHITOutput struct { _ struct{} `type:"structure"` // The name of the Assignment-level Review Policy. This contains only the PolicyName // element. AssignmentReviewPolicy *ReviewPolicy `type:"structure"` // Contains both ReviewResult and ReviewAction elements for an Assignment. AssignmentReviewReport *ReviewReport `type:"structure"` // The HITId of the HIT for which results have been returned. HITId *string `min:"1" type:"string"` // The name of the HIT-level Review Policy. This contains only the PolicyName // element. HITReviewPolicy *ReviewPolicy `type:"structure"` // Contains both ReviewResult and ReviewAction elements for a particular HIT. 
HITReviewReport *ReviewReport `type:"structure"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` } // String returns the string representation func (s ListReviewPolicyResultsForHITOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListReviewPolicyResultsForHITOutput) GoString() string { return s.String() } // SetAssignmentReviewPolicy sets the AssignmentReviewPolicy field's value. func (s *ListReviewPolicyResultsForHITOutput) SetAssignmentReviewPolicy(v *ReviewPolicy) *ListReviewPolicyResultsForHITOutput { s.AssignmentReviewPolicy = v return s } // SetAssignmentReviewReport sets the AssignmentReviewReport field's value. func (s *ListReviewPolicyResultsForHITOutput) SetAssignmentReviewReport(v *ReviewReport) *ListReviewPolicyResultsForHITOutput { s.AssignmentReviewReport = v return s } // SetHITId sets the HITId field's value. func (s *ListReviewPolicyResultsForHITOutput) SetHITId(v string) *ListReviewPolicyResultsForHITOutput { s.HITId = &v return s } // SetHITReviewPolicy sets the HITReviewPolicy field's value. func (s *ListReviewPolicyResultsForHITOutput) SetHITReviewPolicy(v *ReviewPolicy) *ListReviewPolicyResultsForHITOutput { s.HITReviewPolicy = v return s } // SetHITReviewReport sets the HITReviewReport field's value. func (s *ListReviewPolicyResultsForHITOutput) SetHITReviewReport(v *ReviewReport) *ListReviewPolicyResultsForHITOutput { s.HITReviewReport = v return s } // SetNextToken sets the NextToken field's value. func (s *ListReviewPolicyResultsForHITOutput) SetNextToken(v string) *ListReviewPolicyResultsForHITOutput { s.NextToken = &v return s } type ListReviewableHITsInput struct { _ struct{} `type:"structure"` // The ID of the HIT type of the HITs to consider for the query. If not specified, // all HITs for the Reviewer are considered HITTypeId *string `min:"1" type:"string"` // Limit the number of results returned. MaxResults *int64 `min:"1" type:"integer"` // Pagination Token NextToken *string `min:"1" type:"string"` // Can be either Reviewable or Reviewing. Reviewable is the default value. Status *string `type:"string" enum:"ReviewableHITStatus"` } // String returns the string representation func (s ListReviewableHITsInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListReviewableHITsInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ListReviewableHITsInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListReviewableHITsInput"} if s.HITTypeId != nil && len(*s.HITTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITTypeId", 1)) } if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetHITTypeId sets the HITTypeId field's value. func (s *ListReviewableHITsInput) SetHITTypeId(v string) *ListReviewableHITsInput { s.HITTypeId = &v return s } // SetMaxResults sets the MaxResults field's value. 
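//
// Illustrative sketch (not part of the generated API): a common pattern is to page
// through reviewable HITs by feeding NextToken back into the next request until it
// comes back nil. The helper name, the *MTurk client value, and the HIT type ID are
// assumptions made for the example only.
//
//	func listAllReviewableHITs(svc *MTurk, hitTypeID string) ([]*HIT, error) {
//		input := &ListReviewableHITsInput{}
//		input.SetHITTypeId(hitTypeID).SetMaxResults(100)
//		var hits []*HIT
//		for {
//			out, err := svc.ListReviewableHITs(input)
//			if err != nil {
//				return nil, err
//			}
//			hits = append(hits, out.HITs...)
//			if out.NextToken == nil {
//				break
//			}
//			input.SetNextToken(*out.NextToken)
//		}
//		return hits, nil
//	}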
func (s *ListReviewableHITsInput) SetMaxResults(v int64) *ListReviewableHITsInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListReviewableHITsInput) SetNextToken(v string) *ListReviewableHITsInput { s.NextToken = &v return s } // SetStatus sets the Status field's value. func (s *ListReviewableHITsInput) SetStatus(v string) *ListReviewableHITsInput { s.Status = &v return s } type ListReviewableHITsOutput struct { _ struct{} `type:"structure"` // The list of HIT elements returned by the query. HITs []*HIT `type:"list"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of HITs on this page in the filtered results list, equivalent // to the number of HITs being returned by this call. NumResults *int64 `type:"integer"` } // String returns the string representation func (s ListReviewableHITsOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListReviewableHITsOutput) GoString() string { return s.String() } // SetHITs sets the HITs field's value. func (s *ListReviewableHITsOutput) SetHITs(v []*HIT) *ListReviewableHITsOutput { s.HITs = v return s } // SetNextToken sets the NextToken field's value. func (s *ListReviewableHITsOutput) SetNextToken(v string) *ListReviewableHITsOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListReviewableHITsOutput) SetNumResults(v int64) *ListReviewableHITsOutput { s.NumResults = &v return s } type ListWorkerBlocksInput struct { _ struct{} `type:"structure"` MaxResults *int64 `min:"1" type:"integer"` // Pagination token NextToken *string `min:"1" type:"string"` } // String returns the string representation func (s ListWorkerBlocksInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListWorkerBlocksInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ListWorkerBlocksInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListWorkerBlocksInput"} if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetMaxResults sets the MaxResults field's value. func (s *ListWorkerBlocksInput) SetMaxResults(v int64) *ListWorkerBlocksInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListWorkerBlocksInput) SetNextToken(v string) *ListWorkerBlocksInput { s.NextToken = &v return s } type ListWorkerBlocksOutput struct { _ struct{} `type:"structure"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of assignments on the page in the filtered results list, equivalent // to the number of assignments returned by this call. 
NumResults *int64 `type:"integer"` // The list of WorkerBlocks, containing the collection of Worker IDs and reasons // for blocking. WorkerBlocks []*WorkerBlock `type:"list"` } // String returns the string representation func (s ListWorkerBlocksOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListWorkerBlocksOutput) GoString() string { return s.String() } // SetNextToken sets the NextToken field's value. func (s *ListWorkerBlocksOutput) SetNextToken(v string) *ListWorkerBlocksOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListWorkerBlocksOutput) SetNumResults(v int64) *ListWorkerBlocksOutput { s.NumResults = &v return s } // SetWorkerBlocks sets the WorkerBlocks field's value. func (s *ListWorkerBlocksOutput) SetWorkerBlocks(v []*WorkerBlock) *ListWorkerBlocksOutput { s.WorkerBlocks = v return s } type ListWorkersWithQualificationTypeInput struct { _ struct{} `type:"structure"` // Limit the number of results returned. MaxResults *int64 `min:"1" type:"integer"` // Pagination Token NextToken *string `min:"1" type:"string"` // The ID of the Qualification type of the Qualifications to return. // // QualificationTypeId is a required field QualificationTypeId *string `min:"1" type:"string" required:"true"` // The status of the Qualifications to return. Can be Granted | Revoked. Status *string `type:"string" enum:"QualificationStatus"` } // String returns the string representation func (s ListWorkersWithQualificationTypeInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListWorkersWithQualificationTypeInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ListWorkersWithQualificationTypeInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ListWorkersWithQualificationTypeInput"} if s.MaxResults != nil && *s.MaxResults < 1 { invalidParams.Add(request.NewErrParamMinValue("MaxResults", 1)) } if s.NextToken != nil && len(*s.NextToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("NextToken", 1)) } if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetMaxResults sets the MaxResults field's value. func (s *ListWorkersWithQualificationTypeInput) SetMaxResults(v int64) *ListWorkersWithQualificationTypeInput { s.MaxResults = &v return s } // SetNextToken sets the NextToken field's value. func (s *ListWorkersWithQualificationTypeInput) SetNextToken(v string) *ListWorkersWithQualificationTypeInput { s.NextToken = &v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *ListWorkersWithQualificationTypeInput) SetQualificationTypeId(v string) *ListWorkersWithQualificationTypeInput { s.QualificationTypeId = &v return s } // SetStatus sets the Status field's value. func (s *ListWorkersWithQualificationTypeInput) SetStatus(v string) *ListWorkersWithQualificationTypeInput { s.Status = &v return s } type ListWorkersWithQualificationTypeOutput struct { _ struct{} `type:"structure"` // If the previous response was incomplete (because there is more data to retrieve), // Amazon Mechanical Turk returns a pagination token in the response. 
You can // use this pagination token to retrieve the next set of results. NextToken *string `min:"1" type:"string"` // The number of Qualifications on this page in the filtered results list, equivalent // to the number of Qualifications being returned by this call. NumResults *int64 `type:"integer"` // The list of Qualification elements returned by this call. Qualifications []*Qualification `type:"list"` } // String returns the string representation func (s ListWorkersWithQualificationTypeOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ListWorkersWithQualificationTypeOutput) GoString() string { return s.String() } // SetNextToken sets the NextToken field's value. func (s *ListWorkersWithQualificationTypeOutput) SetNextToken(v string) *ListWorkersWithQualificationTypeOutput { s.NextToken = &v return s } // SetNumResults sets the NumResults field's value. func (s *ListWorkersWithQualificationTypeOutput) SetNumResults(v int64) *ListWorkersWithQualificationTypeOutput { s.NumResults = &v return s } // SetQualifications sets the Qualifications field's value. func (s *ListWorkersWithQualificationTypeOutput) SetQualifications(v []*Qualification) *ListWorkersWithQualificationTypeOutput { s.Qualifications = v return s } // The Locale data structure represents a geographical region or location. type Locale struct { _ struct{} `type:"structure"` // The country of the locale. Must be a valid ISO 3166 country code. For example, // the code US refers to the United States of America. // // Country is a required field Country *string `min:"2" type:"string" required:"true"` // The state or subdivision of the locale. A valid ISO 3166-2 subdivision code. // For example, the code WA refers to the state of Washington. Subdivision *string `min:"2" type:"string"` } // String returns the string representation func (s Locale) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s Locale) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *Locale) Validate() error { invalidParams := request.ErrInvalidParams{Context: "Locale"} if s.Country == nil { invalidParams.Add(request.NewErrParamRequired("Country")) } if s.Country != nil && len(*s.Country) < 2 { invalidParams.Add(request.NewErrParamMinLen("Country", 2)) } if s.Subdivision != nil && len(*s.Subdivision) < 2 { invalidParams.Add(request.NewErrParamMinLen("Subdivision", 2)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetCountry sets the Country field's value. func (s *Locale) SetCountry(v string) *Locale { s.Country = &v return s } // SetSubdivision sets the Subdivision field's value. func (s *Locale) SetSubdivision(v string) *Locale { s.Subdivision = &v return s } // The NotificationSpecification data structure describes a HIT event notification // for a HIT type. type NotificationSpecification struct { _ struct{} `type:"structure"` // The target for notification messages. The Destination’s format is determined // by the specified Transport: // // * When Transport is Email, the Destination is your email address. // // * When Transport is SQS, the Destination is your queue URL. // // * When Transport is SNS, the Destination is the ARN of your topic. // // Destination is a required field Destination *string `type:"string" required:"true"` // The list of events that should cause notifications to be sent. 
Valid Values: // AssignmentAccepted | AssignmentAbandoned | AssignmentReturned | AssignmentSubmitted // | AssignmentRejected | AssignmentApproved | HITCreated | HITExtended | HITDisposed // | HITReviewable | HITExpired | Ping. The Ping event is only valid for the // SendTestEventNotification operation. // // EventTypes is a required field EventTypes []*string `type:"list" required:"true"` // The method Amazon Mechanical Turk uses to send the notification. Valid Values: // Email | SQS | SNS. // // Transport is a required field Transport *string `type:"string" required:"true" enum:"NotificationTransport"` // The version of the Notification API to use. Valid value is 2006-05-05. // // Version is a required field Version *string `type:"string" required:"true"` } // String returns the string representation func (s NotificationSpecification) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s NotificationSpecification) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *NotificationSpecification) Validate() error { invalidParams := request.ErrInvalidParams{Context: "NotificationSpecification"} if s.Destination == nil { invalidParams.Add(request.NewErrParamRequired("Destination")) } if s.EventTypes == nil { invalidParams.Add(request.NewErrParamRequired("EventTypes")) } if s.Transport == nil { invalidParams.Add(request.NewErrParamRequired("Transport")) } if s.Version == nil { invalidParams.Add(request.NewErrParamRequired("Version")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetDestination sets the Destination field's value. func (s *NotificationSpecification) SetDestination(v string) *NotificationSpecification { s.Destination = &v return s } // SetEventTypes sets the EventTypes field's value. func (s *NotificationSpecification) SetEventTypes(v []*string) *NotificationSpecification { s.EventTypes = v return s } // SetTransport sets the Transport field's value. func (s *NotificationSpecification) SetTransport(v string) *NotificationSpecification { s.Transport = &v return s } // SetVersion sets the Version field's value. func (s *NotificationSpecification) SetVersion(v string) *NotificationSpecification { s.Version = &v return s } // When MTurk encounters an issue with notifying the Workers you specified, // it returns back this object with failure details. type NotifyWorkersFailureStatus struct { _ struct{} `type:"structure"` // Encoded value for the failure type. NotifyWorkersFailureCode *string `type:"string" enum:"NotifyWorkersFailureCode"` // A message detailing the reason the Worker could not be notified. NotifyWorkersFailureMessage *string `type:"string"` // The ID of the Worker. WorkerId *string `min:"1" type:"string"` } // String returns the string representation func (s NotifyWorkersFailureStatus) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s NotifyWorkersFailureStatus) GoString() string { return s.String() } // SetNotifyWorkersFailureCode sets the NotifyWorkersFailureCode field's value. func (s *NotifyWorkersFailureStatus) SetNotifyWorkersFailureCode(v string) *NotifyWorkersFailureStatus { s.NotifyWorkersFailureCode = &v return s } // SetNotifyWorkersFailureMessage sets the NotifyWorkersFailureMessage field's value. 
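//
// Illustrative sketch (not part of the generated API): NotifyWorkers reports
// per-Worker delivery problems through NotifyWorkersFailureStatus values rather than
// through the returned error, so callers typically inspect both. The helper name,
// the *MTurk client value, and the message text are assumptions made for the example only.
//
//	func notifyAndReport(svc *MTurk, workerIDs []*string) error {
//		input := &NotifyWorkersInput{}
//		input.SetSubject("Batch update").
//			SetMessageText("New HITs are available for the image batch.").
//			SetWorkerIds(workerIDs)
//		out, err := svc.NotifyWorkers(input)
//		if err != nil {
//			return err
//		}
//		for _, f := range out.NotifyWorkersFailureStatuses {
//			if f.WorkerId != nil && f.NotifyWorkersFailureMessage != nil {
//				fmt.Printf("could not notify %s: %s\n", *f.WorkerId, *f.NotifyWorkersFailureMessage)
//			}
//		}
//		return nil
//	}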
func (s *NotifyWorkersFailureStatus) SetNotifyWorkersFailureMessage(v string) *NotifyWorkersFailureStatus { s.NotifyWorkersFailureMessage = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *NotifyWorkersFailureStatus) SetWorkerId(v string) *NotifyWorkersFailureStatus { s.WorkerId = &v return s } type NotifyWorkersInput struct { _ struct{} `type:"structure"` // The text of the email message to send. Can include up to 4,096 characters // // MessageText is a required field MessageText *string `type:"string" required:"true"` // The subject line of the email message to send. Can include up to 200 characters. // // Subject is a required field Subject *string `type:"string" required:"true"` // A list of Worker IDs you wish to notify. You can notify upto 100 Workers // at a time. // // WorkerIds is a required field WorkerIds []*string `type:"list" required:"true"` } // String returns the string representation func (s NotifyWorkersInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s NotifyWorkersInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *NotifyWorkersInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "NotifyWorkersInput"} if s.MessageText == nil { invalidParams.Add(request.NewErrParamRequired("MessageText")) } if s.Subject == nil { invalidParams.Add(request.NewErrParamRequired("Subject")) } if s.WorkerIds == nil { invalidParams.Add(request.NewErrParamRequired("WorkerIds")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetMessageText sets the MessageText field's value. func (s *NotifyWorkersInput) SetMessageText(v string) *NotifyWorkersInput { s.MessageText = &v return s } // SetSubject sets the Subject field's value. func (s *NotifyWorkersInput) SetSubject(v string) *NotifyWorkersInput { s.Subject = &v return s } // SetWorkerIds sets the WorkerIds field's value. func (s *NotifyWorkersInput) SetWorkerIds(v []*string) *NotifyWorkersInput { s.WorkerIds = v return s } type NotifyWorkersOutput struct { _ struct{} `type:"structure"` // When MTurk sends notifications to the list of Workers, it returns back any // failures it encounters in this list of NotifyWorkersFailureStatus objects. NotifyWorkersFailureStatuses []*NotifyWorkersFailureStatus `type:"list"` } // String returns the string representation func (s NotifyWorkersOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s NotifyWorkersOutput) GoString() string { return s.String() } // SetNotifyWorkersFailureStatuses sets the NotifyWorkersFailureStatuses field's value. func (s *NotifyWorkersOutput) SetNotifyWorkersFailureStatuses(v []*NotifyWorkersFailureStatus) *NotifyWorkersOutput { s.NotifyWorkersFailureStatuses = v return s } // This data structure is the data type for the AnswerKey parameter of the ScoreMyKnownAnswers/2011-09-01 // Review Policy. type ParameterMapEntry struct { _ struct{} `type:"structure"` // The QuestionID from the HIT that is used to identify which question requires // Mechanical Turk to score as part of the ScoreMyKnownAnswers/2011-09-01 Review // Policy. Key *string `type:"string"` // The list of answers to the question specified in the MapEntry Key element. // The Worker must match all values in order for the answer to be scored correctly. 
Values []*string `type:"list"` } // String returns the string representation func (s ParameterMapEntry) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ParameterMapEntry) GoString() string { return s.String() } // SetKey sets the Key field's value. func (s *ParameterMapEntry) SetKey(v string) *ParameterMapEntry { s.Key = &v return s } // SetValues sets the Values field's value. func (s *ParameterMapEntry) SetValues(v []*string) *ParameterMapEntry { s.Values = v return s } // Name of the parameter from the Review policy. type PolicyParameter struct { _ struct{} `type:"structure"` // Name of the parameter from the list of Review Polices. Key *string `type:"string"` // List of ParameterMapEntry objects. MapEntries []*ParameterMapEntry `type:"list"` // The list of values of the Parameter Values []*string `type:"list"` } // String returns the string representation func (s PolicyParameter) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s PolicyParameter) GoString() string { return s.String() } // SetKey sets the Key field's value. func (s *PolicyParameter) SetKey(v string) *PolicyParameter { s.Key = &v return s } // SetMapEntries sets the MapEntries field's value. func (s *PolicyParameter) SetMapEntries(v []*ParameterMapEntry) *PolicyParameter { s.MapEntries = v return s } // SetValues sets the Values field's value. func (s *PolicyParameter) SetValues(v []*string) *PolicyParameter { s.Values = v return s } // The Qualification data structure represents a Qualification assigned to a // user, including the Qualification type and the value (score). type Qualification struct { _ struct{} `type:"structure"` // The date and time the Qualification was granted to the Worker. If the Worker's // Qualification was revoked, and then re-granted based on a new Qualification // request, GrantTime is the date and time of the last call to the AcceptQualificationRequest // operation. GrantTime *time.Time `type:"timestamp"` // The value (score) of the Qualification, if the Qualification has an integer // value. IntegerValue *int64 `type:"integer"` // The Locale data structure represents a geographical region or location. LocaleValue *Locale `type:"structure"` // The ID of the Qualification type for the Qualification. QualificationTypeId *string `min:"1" type:"string"` // The status of the Qualification. Valid values are Granted | Revoked. Status *string `type:"string" enum:"QualificationStatus"` // The ID of the Worker who possesses the Qualification. WorkerId *string `min:"1" type:"string"` } // String returns the string representation func (s Qualification) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s Qualification) GoString() string { return s.String() } // SetGrantTime sets the GrantTime field's value. func (s *Qualification) SetGrantTime(v time.Time) *Qualification { s.GrantTime = &v return s } // SetIntegerValue sets the IntegerValue field's value. func (s *Qualification) SetIntegerValue(v int64) *Qualification { s.IntegerValue = &v return s } // SetLocaleValue sets the LocaleValue field's value. func (s *Qualification) SetLocaleValue(v *Locale) *Qualification { s.LocaleValue = v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *Qualification) SetQualificationTypeId(v string) *Qualification { s.QualificationTypeId = &v return s } // SetStatus sets the Status field's value. 
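//
// Illustrative sketch (not part of the generated API): because Status and IntegerValue
// are optional pointers, a small nil-safe helper keeps score checks readable. The
// helper name and threshold are assumptions made for the example only; "Granted" is
// one of the documented Status values.
//
//	func meetsScore(q *Qualification, min int64) bool {
//		if q == nil || q.Status == nil || *q.Status != "Granted" {
//			return false
//		}
//		return q.IntegerValue != nil && *q.IntegerValue >= min
//	}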
func (s *Qualification) SetStatus(v string) *Qualification { s.Status = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *Qualification) SetWorkerId(v string) *Qualification { s.WorkerId = &v return s } // The QualificationRequest data structure represents a request a Worker has // made for a Qualification. type QualificationRequest struct { _ struct{} `type:"structure"` // The Worker's answers for the Qualification type's test contained in a QuestionFormAnswers // document, if the type has a test and the Worker has submitted answers. If // the Worker does not provide any answers, Answer may be empty. Answer *string `type:"string"` // The ID of the Qualification request, a unique identifier generated when the // request was submitted. QualificationRequestId *string `type:"string"` // The ID of the Qualification type the Worker is requesting, as returned by // the CreateQualificationType operation. QualificationTypeId *string `min:"1" type:"string"` // The date and time the Qualification request had a status of Submitted. This // is either the time the Worker submitted answers for a Qualification test, // or the time the Worker requested the Qualification if the Qualification type // does not have a test. SubmitTime *time.Time `type:"timestamp"` // The contents of the Qualification test that was presented to the Worker, // if the type has a test and the Worker has submitted answers. This value is // identical to the QuestionForm associated with the Qualification type at the // time the Worker requests the Qualification. Test *string `type:"string"` // The ID of the Worker requesting the Qualification. WorkerId *string `min:"1" type:"string"` } // String returns the string representation func (s QualificationRequest) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s QualificationRequest) GoString() string { return s.String() } // SetAnswer sets the Answer field's value. func (s *QualificationRequest) SetAnswer(v string) *QualificationRequest { s.Answer = &v return s } // SetQualificationRequestId sets the QualificationRequestId field's value. func (s *QualificationRequest) SetQualificationRequestId(v string) *QualificationRequest { s.QualificationRequestId = &v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *QualificationRequest) SetQualificationTypeId(v string) *QualificationRequest { s.QualificationTypeId = &v return s } // SetSubmitTime sets the SubmitTime field's value. func (s *QualificationRequest) SetSubmitTime(v time.Time) *QualificationRequest { s.SubmitTime = &v return s } // SetTest sets the Test field's value. func (s *QualificationRequest) SetTest(v string) *QualificationRequest { s.Test = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *QualificationRequest) SetWorkerId(v string) *QualificationRequest { s.WorkerId = &v return s } // The QualificationRequirement data structure describes a Qualification that // a Worker must have before the Worker is allowed to accept a HIT. A requirement // may optionally state that a Worker must have the Qualification in order to // preview the HIT, or see the HIT in search results. type QualificationRequirement struct { _ struct{} `type:"structure"` // Setting this attribute prevents Workers whose Qualifications do not meet // this QualificationRequirement from taking the specified action. 
Valid arguments // include "Accept" (Worker cannot accept the HIT, but can preview the HIT and // see it in their search results), "PreviewAndAccept" (Worker cannot accept // or preview the HIT, but can see the HIT in their search results), and "DiscoverPreviewAndAccept" // (Worker cannot accept, preview, or see the HIT in their search results). // It's possible for you to create a HIT with multiple QualificationRequirements // (which can have different values for the ActionGuarded attribute). In this // case, the Worker is only permitted to perform an action when they have met // all QualificationRequirements guarding the action. The actions in the order // of least restrictive to most restrictive are Discover, Preview and Accept. // For example, if a Worker meets all QualificationRequirements that are set // to DiscoverPreviewAndAccept, but do not meet all requirements that are set // with PreviewAndAccept, then the Worker will be able to Discover, i.e. see // the HIT in their search result, but will not be able to Preview or Accept // the HIT. ActionsGuarded should not be used in combination with the RequiredToPreview // field. ActionsGuarded *string `type:"string" enum:"HITAccessActions"` // The kind of comparison to make against a Qualification's value. You can compare // a Qualification's value to an IntegerValue to see if it is LessThan, LessThanOrEqualTo, // GreaterThan, GreaterThanOrEqualTo, EqualTo, or NotEqualTo the IntegerValue. // You can compare it to a LocaleValue to see if it is EqualTo, or NotEqualTo // the LocaleValue. You can check to see if the value is In or NotIn a set of // IntegerValue or LocaleValue values. Lastly, a Qualification requirement can // also test if a Qualification Exists or DoesNotExist in the user's profile, // regardless of its value. // // Comparator is a required field Comparator *string `type:"string" required:"true" enum:"Comparator"` // The integer value to compare against the Qualification's value. IntegerValue // must not be present if Comparator is Exists or DoesNotExist. IntegerValue // can only be used if the Qualification type has an integer value; it cannot // be used with the Worker_Locale QualificationType ID. When performing a set // comparison by using the In or the NotIn comparator, you can use up to 15 // IntegerValue elements in a QualificationRequirement data structure. IntegerValues []*int64 `type:"list"` // The locale value to compare against the Qualification's value. The local // value must be a valid ISO 3166 country code or supports ISO 3166-2 subdivisions. // LocaleValue can only be used with a Worker_Locale QualificationType ID. LocaleValue // can only be used with the EqualTo, NotEqualTo, In, and NotIn comparators. // You must only use a single LocaleValue element when using the EqualTo or // NotEqualTo comparators. When performing a set comparison by using the In // or the NotIn comparator, you can use up to 30 LocaleValue elements in a QualificationRequirement // data structure. LocaleValues []*Locale `type:"list"` // The ID of the Qualification type for the requirement. // // QualificationTypeId is a required field QualificationTypeId *string `type:"string" required:"true"` // DEPRECATED: Use the ActionsGuarded field instead. If RequiredToPreview is // true, the question data for the HIT will not be shown when a Worker whose // Qualifications do not meet this requirement tries to preview the HIT. 
That // is, a Worker's Qualifications must meet all of the requirements for which // RequiredToPreview is true in order to preview the HIT. If a Worker meets // all of the requirements where RequiredToPreview is true (or if there are // no such requirements), but does not meet all of the requirements for the // HIT, the Worker will be allowed to preview the HIT's question data, but will // not be allowed to accept and complete the HIT. The default is false. This // should not be used in combination with the ActionsGuarded field. // // Deprecated: RequiredToPreview has been deprecated RequiredToPreview *bool `deprecated:"true" type:"boolean"` } // String returns the string representation func (s QualificationRequirement) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s QualificationRequirement) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *QualificationRequirement) Validate() error { invalidParams := request.ErrInvalidParams{Context: "QualificationRequirement"} if s.Comparator == nil { invalidParams.Add(request.NewErrParamRequired("Comparator")) } if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.LocaleValues != nil { for i, v := range s.LocaleValues { if v == nil { continue } if err := v.Validate(); err != nil { invalidParams.AddNested(fmt.Sprintf("%s[%v]", "LocaleValues", i), err.(request.ErrInvalidParams)) } } } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetActionsGuarded sets the ActionsGuarded field's value. func (s *QualificationRequirement) SetActionsGuarded(v string) *QualificationRequirement { s.ActionsGuarded = &v return s } // SetComparator sets the Comparator field's value. func (s *QualificationRequirement) SetComparator(v string) *QualificationRequirement { s.Comparator = &v return s } // SetIntegerValues sets the IntegerValues field's value. func (s *QualificationRequirement) SetIntegerValues(v []*int64) *QualificationRequirement { s.IntegerValues = v return s } // SetLocaleValues sets the LocaleValues field's value. func (s *QualificationRequirement) SetLocaleValues(v []*Locale) *QualificationRequirement { s.LocaleValues = v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *QualificationRequirement) SetQualificationTypeId(v string) *QualificationRequirement { s.QualificationTypeId = &v return s } // SetRequiredToPreview sets the RequiredToPreview field's value. func (s *QualificationRequirement) SetRequiredToPreview(v bool) *QualificationRequirement { s.RequiredToPreview = &v return s } // The QualificationType data structure represents a Qualification type, a description // of a property of a Worker that must match the requirements of a HIT for the // Worker to be able to accept the HIT. The type also describes how a Worker // can obtain a Qualification of that type, such as through a Qualification // test. type QualificationType struct { _ struct{} `type:"structure"` // The answers to the Qualification test specified in the Test parameter. AnswerKey *string `type:"string"` // Specifies that requests for the Qualification type are granted immediately, // without prompting the Worker with a Qualification test. Valid values are // True | False. AutoGranted *bool `type:"boolean"` // The Qualification integer value to use for automatically granted Qualifications, // if AutoGranted is true. This is 1 by default. 
AutoGrantedValue *int64 `type:"integer"` // The date and time the Qualification type was created. CreationTime *time.Time `type:"timestamp"` // A long description for the Qualification type. Description *string `type:"string"` // Specifies whether the Qualification type is one that a user can request through // the Amazon Mechanical Turk web site, such as by taking a Qualification test. // This value is False for Qualifications assigned automatically by the system. // Valid values are True | False. IsRequestable *bool `type:"boolean"` // One or more words or phrases that describe theQualification type, separated // by commas. The Keywords make the type easier to find using a search. Keywords *string `type:"string"` // The name of the Qualification type. The type name is used to identify the // type, and to find the type using a Qualification type search. Name *string `type:"string"` // A unique identifier for the Qualification type. A Qualification type is given // a Qualification type ID when you call the CreateQualificationType operation. QualificationTypeId *string `min:"1" type:"string"` // The status of the Qualification type. A Qualification type's status determines // if users can apply to receive a Qualification of this type, and if HITs can // be created with requirements based on this type. Valid values are Active // | Inactive. QualificationTypeStatus *string `type:"string" enum:"QualificationTypeStatus"` // The amount of time, in seconds, Workers must wait after taking the Qualification // test before they can take it again. Workers can take a Qualification test // multiple times if they were not granted the Qualification from a previous // attempt, or if the test offers a gradient score and they want a better score. // If not specified, retries are disabled and Workers can request a Qualification // only once. RetryDelayInSeconds *int64 `type:"long"` // The questions for a Qualification test associated with this Qualification // type that a user can take to obtain a Qualification of this type. This parameter // must be specified if AnswerKey is present. A Qualification type cannot have // both a specified Test parameter and an AutoGranted value of true. Test *string `type:"string"` // The amount of time, in seconds, given to a Worker to complete the Qualification // test, beginning from the time the Worker requests the Qualification. TestDurationInSeconds *int64 `type:"long"` } // String returns the string representation func (s QualificationType) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s QualificationType) GoString() string { return s.String() } // SetAnswerKey sets the AnswerKey field's value. func (s *QualificationType) SetAnswerKey(v string) *QualificationType { s.AnswerKey = &v return s } // SetAutoGranted sets the AutoGranted field's value. func (s *QualificationType) SetAutoGranted(v bool) *QualificationType { s.AutoGranted = &v return s } // SetAutoGrantedValue sets the AutoGrantedValue field's value. func (s *QualificationType) SetAutoGrantedValue(v int64) *QualificationType { s.AutoGrantedValue = &v return s } // SetCreationTime sets the CreationTime field's value. func (s *QualificationType) SetCreationTime(v time.Time) *QualificationType { s.CreationTime = &v return s } // SetDescription sets the Description field's value. func (s *QualificationType) SetDescription(v string) *QualificationType { s.Description = &v return s } // SetIsRequestable sets the IsRequestable field's value. 
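//
// Illustrative sketch (not part of the generated API): an auto-granted type skips the
// Qualification test entirely, so Test and AnswerKey are omitted and AutoGrantedValue
// supplies the score. The helper name, the *MTurk client value, and the type name are
// assumptions made for the example only.
//
//	func createAutoGrantedType(svc *MTurk) (*QualificationType, error) {
//		input := &CreateQualificationTypeInput{}
//		input.SetName("trusted-annotator").
//			SetDescription("Granted automatically on request.").
//			SetQualificationTypeStatus("Active").
//			SetAutoGranted(true).
//			SetAutoGrantedValue(1)
//		out, err := svc.CreateQualificationType(input)
//		if err != nil {
//			return nil, err
//		}
//		return out.QualificationType, nil
//	}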
func (s *QualificationType) SetIsRequestable(v bool) *QualificationType { s.IsRequestable = &v return s } // SetKeywords sets the Keywords field's value. func (s *QualificationType) SetKeywords(v string) *QualificationType { s.Keywords = &v return s } // SetName sets the Name field's value. func (s *QualificationType) SetName(v string) *QualificationType { s.Name = &v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *QualificationType) SetQualificationTypeId(v string) *QualificationType { s.QualificationTypeId = &v return s } // SetQualificationTypeStatus sets the QualificationTypeStatus field's value. func (s *QualificationType) SetQualificationTypeStatus(v string) *QualificationType { s.QualificationTypeStatus = &v return s } // SetRetryDelayInSeconds sets the RetryDelayInSeconds field's value. func (s *QualificationType) SetRetryDelayInSeconds(v int64) *QualificationType { s.RetryDelayInSeconds = &v return s } // SetTest sets the Test field's value. func (s *QualificationType) SetTest(v string) *QualificationType { s.Test = &v return s } // SetTestDurationInSeconds sets the TestDurationInSeconds field's value. func (s *QualificationType) SetTestDurationInSeconds(v int64) *QualificationType { s.TestDurationInSeconds = &v return s } type RejectAssignmentInput struct { _ struct{} `type:"structure"` // The ID of the assignment. The assignment must correspond to a HIT created // by the Requester. // // AssignmentId is a required field AssignmentId *string `min:"1" type:"string" required:"true"` // A message for the Worker, which the Worker can see in the Status section // of the web site. // // RequesterFeedback is a required field RequesterFeedback *string `type:"string" required:"true"` } // String returns the string representation func (s RejectAssignmentInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s RejectAssignmentInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *RejectAssignmentInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "RejectAssignmentInput"} if s.AssignmentId == nil { invalidParams.Add(request.NewErrParamRequired("AssignmentId")) } if s.AssignmentId != nil && len(*s.AssignmentId) < 1 { invalidParams.Add(request.NewErrParamMinLen("AssignmentId", 1)) } if s.RequesterFeedback == nil { invalidParams.Add(request.NewErrParamRequired("RequesterFeedback")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentId sets the AssignmentId field's value. func (s *RejectAssignmentInput) SetAssignmentId(v string) *RejectAssignmentInput { s.AssignmentId = &v return s } // SetRequesterFeedback sets the RequesterFeedback field's value. func (s *RejectAssignmentInput) SetRequesterFeedback(v string) *RejectAssignmentInput { s.RequesterFeedback = &v return s } type RejectAssignmentOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s RejectAssignmentOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s RejectAssignmentOutput) GoString() string { return s.String() } type RejectQualificationRequestInput struct { _ struct{} `type:"structure"` // The ID of the Qualification request, as returned by the ListQualificationRequests // operation. 
// // QualificationRequestId is a required field QualificationRequestId *string `type:"string" required:"true"` // A text message explaining why the request was rejected, to be shown to the // Worker who made the request. Reason *string `type:"string"` } // String returns the string representation func (s RejectQualificationRequestInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s RejectQualificationRequestInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *RejectQualificationRequestInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "RejectQualificationRequestInput"} if s.QualificationRequestId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationRequestId")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetQualificationRequestId sets the QualificationRequestId field's value. func (s *RejectQualificationRequestInput) SetQualificationRequestId(v string) *RejectQualificationRequestInput { s.QualificationRequestId = &v return s } // SetReason sets the Reason field's value. func (s *RejectQualificationRequestInput) SetReason(v string) *RejectQualificationRequestInput { s.Reason = &v return s } type RejectQualificationRequestOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s RejectQualificationRequestOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s RejectQualificationRequestOutput) GoString() string { return s.String() } // Your request is invalid. type RequestError struct { _ struct{} `type:"structure"` RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` Message_ *string `locationName:"Message" type:"string"` TurkErrorCode *string `type:"string"` } // String returns the string representation func (s RequestError) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s RequestError) GoString() string { return s.String() } func newErrorRequestError(v protocol.ResponseMetadata) error { return &RequestError{ RespMetadata: v, } } // Code returns the exception type name. func (s *RequestError) Code() string { return "RequestError" } // Message returns the exception's message. func (s *RequestError) Message() string { if s.Message_ != nil { return *s.Message_ } return "" } // OrigErr always returns nil, satisfies awserr.Error interface. func (s *RequestError) OrigErr() error { return nil } func (s *RequestError) Error() string { return fmt.Sprintf("%s: %s\n%s", s.Code(), s.Message(), s.String()) } // Status code returns the HTTP status code for the request's response error. func (s *RequestError) StatusCode() int { return s.RespMetadata.StatusCode } // RequestID returns the service's response RequestID for request. func (s *RequestError) RequestID() string { return s.RespMetadata.RequestID } // Both the AssignmentReviewReport and the HITReviewReport elements contains // the ReviewActionDetail data structure. This structure is returned multiple // times for each action specified in the Review Policy. type ReviewActionDetail struct { _ struct{} `type:"structure"` // The unique identifier for the action. ActionId *string `min:"1" type:"string"` // The nature of the action itself. The Review Policy is responsible for examining // the HIT and Assignments, emitting results, and deciding which other actions // will be necessary. 
ActionName *string `type:"string"` // The date when the action was completed. CompleteTime *time.Time `type:"timestamp"` // Present only when the Results have a FAILED Status. ErrorCode *string `type:"string"` // A description of the outcome of the review. Result *string `type:"string"` // The current disposition of the action: INTENDED, SUCCEEDED, FAILED, or CANCELLED. Status *string `type:"string" enum:"ReviewActionStatus"` // The specific HITId or AssignmentID targeted by the action. TargetId *string `min:"1" type:"string"` // The type of object in TargetId. TargetType *string `type:"string"` } // String returns the string representation func (s ReviewActionDetail) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ReviewActionDetail) GoString() string { return s.String() } // SetActionId sets the ActionId field's value. func (s *ReviewActionDetail) SetActionId(v string) *ReviewActionDetail { s.ActionId = &v return s } // SetActionName sets the ActionName field's value. func (s *ReviewActionDetail) SetActionName(v string) *ReviewActionDetail { s.ActionName = &v return s } // SetCompleteTime sets the CompleteTime field's value. func (s *ReviewActionDetail) SetCompleteTime(v time.Time) *ReviewActionDetail { s.CompleteTime = &v return s } // SetErrorCode sets the ErrorCode field's value. func (s *ReviewActionDetail) SetErrorCode(v string) *ReviewActionDetail { s.ErrorCode = &v return s } // SetResult sets the Result field's value. func (s *ReviewActionDetail) SetResult(v string) *ReviewActionDetail { s.Result = &v return s } // SetStatus sets the Status field's value. func (s *ReviewActionDetail) SetStatus(v string) *ReviewActionDetail { s.Status = &v return s } // SetTargetId sets the TargetId field's value. func (s *ReviewActionDetail) SetTargetId(v string) *ReviewActionDetail { s.TargetId = &v return s } // SetTargetType sets the TargetType field's value. func (s *ReviewActionDetail) SetTargetType(v string) *ReviewActionDetail { s.TargetType = &v return s } // HIT Review Policy data structures represent HIT review policies, which you // specify when you create a HIT. type ReviewPolicy struct { _ struct{} `type:"structure"` // Name of the parameter from the Review policy. Parameters []*PolicyParameter `type:"list"` // Name of a Review Policy: SimplePlurality/2011-09-01 or ScoreMyKnownAnswers/2011-09-01 // // PolicyName is a required field PolicyName *string `type:"string" required:"true"` } // String returns the string representation func (s ReviewPolicy) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ReviewPolicy) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *ReviewPolicy) Validate() error { invalidParams := request.ErrInvalidParams{Context: "ReviewPolicy"} if s.PolicyName == nil { invalidParams.Add(request.NewErrParamRequired("PolicyName")) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetParameters sets the Parameters field's value. func (s *ReviewPolicy) SetParameters(v []*PolicyParameter) *ReviewPolicy { s.Parameters = v return s } // SetPolicyName sets the PolicyName field's value. func (s *ReviewPolicy) SetPolicyName(v string) *ReviewPolicy { s.PolicyName = &v return s } // Contains both ReviewResult and ReviewAction elements for a particular HIT. 
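//
// Illustrative sketch (not part of the generated API): a report can be summarized by
// walking its actions and results, guarding against the optional pointer fields. The
// helper name is an assumption made for the example only.
//
//	func summarizeReport(r *ReviewReport) {
//		if r == nil {
//			return
//		}
//		for _, a := range r.ReviewActions {
//			if a.ActionName != nil && a.Status != nil {
//				fmt.Printf("action %s: %s\n", *a.ActionName, *a.Status)
//			}
//		}
//		for _, res := range r.ReviewResults {
//			if res.Key != nil && res.Value != nil {
//				fmt.Printf("result %s = %s\n", *res.Key, *res.Value)
//			}
//		}
//	}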
type ReviewReport struct { _ struct{} `type:"structure"` // A list of ReviewAction objects for each action specified in the Review Policy. ReviewActions []*ReviewActionDetail `type:"list"` // A list of ReviewResults objects for each action specified in the Review Policy. ReviewResults []*ReviewResultDetail `type:"list"` } // String returns the string representation func (s ReviewReport) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ReviewReport) GoString() string { return s.String() } // SetReviewActions sets the ReviewActions field's value. func (s *ReviewReport) SetReviewActions(v []*ReviewActionDetail) *ReviewReport { s.ReviewActions = v return s } // SetReviewResults sets the ReviewResults field's value. func (s *ReviewReport) SetReviewResults(v []*ReviewResultDetail) *ReviewReport { s.ReviewResults = v return s } // This data structure is returned multiple times for each result specified // in the Review Policy. type ReviewResultDetail struct { _ struct{} `type:"structure"` // A unique identifier of the Review action result. ActionId *string `min:"1" type:"string"` // Key identifies the particular piece of reviewed information. Key *string `type:"string"` // Specifies the QuestionId the result is describing. Depending on whether the // TargetType is a HIT or Assignment this results could specify multiple values. // If TargetType is HIT and QuestionId is absent, then the result describes // results of the HIT, including the HIT agreement score. If ObjectType is Assignment // and QuestionId is absent, then the result describes the Worker's performance // on the HIT. QuestionId *string `min:"1" type:"string"` // The HITID or AssignmentId about which this result was taken. Note that HIT-level // Review Policies will often emit results about both the HIT itself and its // Assignments, while Assignment-level review policies generally only emit results // about the Assignment itself. SubjectId *string `min:"1" type:"string"` // The type of the object from the SubjectId field. SubjectType *string `type:"string"` // The values of Key provided by the review policies you have selected. Value *string `type:"string"` } // String returns the string representation func (s ReviewResultDetail) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ReviewResultDetail) GoString() string { return s.String() } // SetActionId sets the ActionId field's value. func (s *ReviewResultDetail) SetActionId(v string) *ReviewResultDetail { s.ActionId = &v return s } // SetKey sets the Key field's value. func (s *ReviewResultDetail) SetKey(v string) *ReviewResultDetail { s.Key = &v return s } // SetQuestionId sets the QuestionId field's value. func (s *ReviewResultDetail) SetQuestionId(v string) *ReviewResultDetail { s.QuestionId = &v return s } // SetSubjectId sets the SubjectId field's value. func (s *ReviewResultDetail) SetSubjectId(v string) *ReviewResultDetail { s.SubjectId = &v return s } // SetSubjectType sets the SubjectType field's value. func (s *ReviewResultDetail) SetSubjectType(v string) *ReviewResultDetail { s.SubjectType = &v return s } // SetValue sets the Value field's value. func (s *ReviewResultDetail) SetValue(v string) *ReviewResultDetail { s.Value = &v return s } type SendBonusInput struct { _ struct{} `type:"structure"` // The ID of the assignment for which this bonus is paid. 
// // AssignmentId is a required field AssignmentId *string `min:"1" type:"string" required:"true"` // The Bonus amount is a US Dollar amount specified using a string (for example, // "5" represents $5.00 USD and "101.42" represents $101.42 USD). Do not include // currency symbols or currency codes. // // BonusAmount is a required field BonusAmount *string `type:"string" required:"true"` // A message that explains the reason for the bonus payment. The Worker receiving // the bonus can see this message. // // Reason is a required field Reason *string `type:"string" required:"true"` // A unique identifier for this request, which allows you to retry the call // on error without granting multiple bonuses. This is useful in cases such // as network timeouts where it is unclear whether or not the call succeeded // on the server. If the bonus already exists in the system from a previous // call using the same UniqueRequestToken, subsequent calls will return an error // with a message containing the request ID. UniqueRequestToken *string `min:"1" type:"string"` // The ID of the Worker being paid the bonus. // // WorkerId is a required field WorkerId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s SendBonusInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s SendBonusInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *SendBonusInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "SendBonusInput"} if s.AssignmentId == nil { invalidParams.Add(request.NewErrParamRequired("AssignmentId")) } if s.AssignmentId != nil && len(*s.AssignmentId) < 1 { invalidParams.Add(request.NewErrParamMinLen("AssignmentId", 1)) } if s.BonusAmount == nil { invalidParams.Add(request.NewErrParamRequired("BonusAmount")) } if s.Reason == nil { invalidParams.Add(request.NewErrParamRequired("Reason")) } if s.UniqueRequestToken != nil && len(*s.UniqueRequestToken) < 1 { invalidParams.Add(request.NewErrParamMinLen("UniqueRequestToken", 1)) } if s.WorkerId == nil { invalidParams.Add(request.NewErrParamRequired("WorkerId")) } if s.WorkerId != nil && len(*s.WorkerId) < 1 { invalidParams.Add(request.NewErrParamMinLen("WorkerId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAssignmentId sets the AssignmentId field's value. func (s *SendBonusInput) SetAssignmentId(v string) *SendBonusInput { s.AssignmentId = &v return s } // SetBonusAmount sets the BonusAmount field's value. func (s *SendBonusInput) SetBonusAmount(v string) *SendBonusInput { s.BonusAmount = &v return s } // SetReason sets the Reason field's value. func (s *SendBonusInput) SetReason(v string) *SendBonusInput { s.Reason = &v return s } // SetUniqueRequestToken sets the UniqueRequestToken field's value. func (s *SendBonusInput) SetUniqueRequestToken(v string) *SendBonusInput { s.UniqueRequestToken = &v return s } // SetWorkerId sets the WorkerId field's value. 
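//
// Illustrative sketch (not part of the generated API): reusing the same
// UniqueRequestToken across retries keeps a transient network failure from paying the
// bonus twice. The helper name, the *MTurk client value, the amount, and the token
// scheme are assumptions made for the example only.
//
//	func payBonus(svc *MTurk, assignmentID, workerID string) error {
//		input := &SendBonusInput{}
//		input.SetAssignmentId(assignmentID).
//			SetWorkerId(workerID).
//			SetBonusAmount("1.50").
//			SetReason("Accurate work on the bonus-eligible batch.").
//			SetUniqueRequestToken(assignmentID + "-bonus-1")
//		_, err := svc.SendBonus(input)
//		return err
//	}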
func (s *SendBonusInput) SetWorkerId(v string) *SendBonusInput { s.WorkerId = &v return s } type SendBonusOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s SendBonusOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s SendBonusOutput) GoString() string { return s.String() } type SendTestEventNotificationInput struct { _ struct{} `type:"structure"` // The notification specification to test. This value is identical to the value // you would provide to the UpdateNotificationSettings operation when you establish // the notification specification for a HIT type. // // Notification is a required field Notification *NotificationSpecification `type:"structure" required:"true"` // The event to simulate to test the notification specification. This event // is included in the test message even if the notification specification does // not include the event type. The notification specification does not filter // out the test event. // // TestEventType is a required field TestEventType *string `type:"string" required:"true" enum:"EventType"` } // String returns the string representation func (s SendTestEventNotificationInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s SendTestEventNotificationInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *SendTestEventNotificationInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "SendTestEventNotificationInput"} if s.Notification == nil { invalidParams.Add(request.NewErrParamRequired("Notification")) } if s.TestEventType == nil { invalidParams.Add(request.NewErrParamRequired("TestEventType")) } if s.Notification != nil { if err := s.Notification.Validate(); err != nil { invalidParams.AddNested("Notification", err.(request.ErrInvalidParams)) } } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetNotification sets the Notification field's value. func (s *SendTestEventNotificationInput) SetNotification(v *NotificationSpecification) *SendTestEventNotificationInput { s.Notification = v return s } // SetTestEventType sets the TestEventType field's value. func (s *SendTestEventNotificationInput) SetTestEventType(v string) *SendTestEventNotificationInput { s.TestEventType = &v return s } type SendTestEventNotificationOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s SendTestEventNotificationOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s SendTestEventNotificationOutput) GoString() string { return s.String() } // Amazon Mechanical Turk is temporarily unable to process your request. Try // your call again. type ServiceFault struct { _ struct{} `type:"structure"` RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` Message_ *string `locationName:"Message" type:"string"` TurkErrorCode *string `type:"string"` } // String returns the string representation func (s ServiceFault) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s ServiceFault) GoString() string { return s.String() } func newErrorServiceFault(v protocol.ResponseMetadata) error { return &ServiceFault{ RespMetadata: v, } } // Code returns the exception type name. func (s *ServiceFault) Code() string { return "ServiceFault" } // Message returns the exception's message. 
func (s *ServiceFault) Message() string { if s.Message_ != nil { return *s.Message_ } return "" } // OrigErr always returns nil, satisfies awserr.Error interface. func (s *ServiceFault) OrigErr() error { return nil } func (s *ServiceFault) Error() string { return fmt.Sprintf("%s: %s\n%s", s.Code(), s.Message(), s.String()) } // Status code returns the HTTP status code for the request's response error. func (s *ServiceFault) StatusCode() int { return s.RespMetadata.StatusCode } // RequestID returns the service's response RequestID for request. func (s *ServiceFault) RequestID() string { return s.RespMetadata.RequestID } type UpdateExpirationForHITInput struct { _ struct{} `type:"structure"` // The date and time at which you want the HIT to expire // // ExpireAt is a required field ExpireAt *time.Time `type:"timestamp" required:"true"` // The HIT to update. // // HITId is a required field HITId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s UpdateExpirationForHITInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateExpirationForHITInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *UpdateExpirationForHITInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "UpdateExpirationForHITInput"} if s.ExpireAt == nil { invalidParams.Add(request.NewErrParamRequired("ExpireAt")) } if s.HITId == nil { invalidParams.Add(request.NewErrParamRequired("HITId")) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetExpireAt sets the ExpireAt field's value. func (s *UpdateExpirationForHITInput) SetExpireAt(v time.Time) *UpdateExpirationForHITInput { s.ExpireAt = &v return s } // SetHITId sets the HITId field's value. func (s *UpdateExpirationForHITInput) SetHITId(v string) *UpdateExpirationForHITInput { s.HITId = &v return s } type UpdateExpirationForHITOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s UpdateExpirationForHITOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateExpirationForHITOutput) GoString() string { return s.String() } type UpdateHITReviewStatusInput struct { _ struct{} `type:"structure"` // The ID of the HIT to update. // // HITId is a required field HITId *string `min:"1" type:"string" required:"true"` // Specifies how to update the HIT status. Default is False. // // * Setting this to false will only transition a HIT from Reviewable to // Reviewing // // * Setting this to true will only transition a HIT from Reviewing to Reviewable Revert *bool `type:"boolean"` } // String returns the string representation func (s UpdateHITReviewStatusInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateHITReviewStatusInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. 
func (s *UpdateHITReviewStatusInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "UpdateHITReviewStatusInput"} if s.HITId == nil { invalidParams.Add(request.NewErrParamRequired("HITId")) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetHITId sets the HITId field's value. func (s *UpdateHITReviewStatusInput) SetHITId(v string) *UpdateHITReviewStatusInput { s.HITId = &v return s } // SetRevert sets the Revert field's value. func (s *UpdateHITReviewStatusInput) SetRevert(v bool) *UpdateHITReviewStatusInput { s.Revert = &v return s } type UpdateHITReviewStatusOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s UpdateHITReviewStatusOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateHITReviewStatusOutput) GoString() string { return s.String() } type UpdateHITTypeOfHITInput struct { _ struct{} `type:"structure"` // The HIT to update. // // HITId is a required field HITId *string `min:"1" type:"string" required:"true"` // The ID of the new HIT type. // // HITTypeId is a required field HITTypeId *string `min:"1" type:"string" required:"true"` } // String returns the string representation func (s UpdateHITTypeOfHITInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateHITTypeOfHITInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *UpdateHITTypeOfHITInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "UpdateHITTypeOfHITInput"} if s.HITId == nil { invalidParams.Add(request.NewErrParamRequired("HITId")) } if s.HITId != nil && len(*s.HITId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITId", 1)) } if s.HITTypeId == nil { invalidParams.Add(request.NewErrParamRequired("HITTypeId")) } if s.HITTypeId != nil && len(*s.HITTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITTypeId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetHITId sets the HITId field's value. func (s *UpdateHITTypeOfHITInput) SetHITId(v string) *UpdateHITTypeOfHITInput { s.HITId = &v return s } // SetHITTypeId sets the HITTypeId field's value. func (s *UpdateHITTypeOfHITInput) SetHITTypeId(v string) *UpdateHITTypeOfHITInput { s.HITTypeId = &v return s } type UpdateHITTypeOfHITOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s UpdateHITTypeOfHITOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateHITTypeOfHITOutput) GoString() string { return s.String() } type UpdateNotificationSettingsInput struct { _ struct{} `type:"structure"` // Specifies whether notifications are sent for HITs of this HIT type, according // to the notification specification. You must specify either the Notification // parameter or the Active parameter for the call to UpdateNotificationSettings // to succeed. Active *bool `type:"boolean"` // The ID of the HIT type whose notification specification is being updated. // // HITTypeId is a required field HITTypeId *string `min:"1" type:"string" required:"true"` // The notification specification for the HIT type. 
Notification *NotificationSpecification `type:"structure"` } // String returns the string representation func (s UpdateNotificationSettingsInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateNotificationSettingsInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *UpdateNotificationSettingsInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "UpdateNotificationSettingsInput"} if s.HITTypeId == nil { invalidParams.Add(request.NewErrParamRequired("HITTypeId")) } if s.HITTypeId != nil && len(*s.HITTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("HITTypeId", 1)) } if s.Notification != nil { if err := s.Notification.Validate(); err != nil { invalidParams.AddNested("Notification", err.(request.ErrInvalidParams)) } } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetActive sets the Active field's value. func (s *UpdateNotificationSettingsInput) SetActive(v bool) *UpdateNotificationSettingsInput { s.Active = &v return s } // SetHITTypeId sets the HITTypeId field's value. func (s *UpdateNotificationSettingsInput) SetHITTypeId(v string) *UpdateNotificationSettingsInput { s.HITTypeId = &v return s } // SetNotification sets the Notification field's value. func (s *UpdateNotificationSettingsInput) SetNotification(v *NotificationSpecification) *UpdateNotificationSettingsInput { s.Notification = v return s } type UpdateNotificationSettingsOutput struct { _ struct{} `type:"structure"` } // String returns the string representation func (s UpdateNotificationSettingsOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateNotificationSettingsOutput) GoString() string { return s.String() } type UpdateQualificationTypeInput struct { _ struct{} `type:"structure"` // The answers to the Qualification test specified in the Test parameter, in // the form of an AnswerKey data structure. AnswerKey *string `type:"string"` // Specifies whether requests for the Qualification type are granted immediately, // without prompting the Worker with a Qualification test. // // Constraints: If the Test parameter is specified, this parameter cannot be // true. AutoGranted *bool `type:"boolean"` // The Qualification value to use for automatically granted Qualifications. // This parameter is used only if the AutoGranted parameter is true. AutoGrantedValue *int64 `type:"integer"` // The new description of the Qualification type. Description *string `type:"string"` // The ID of the Qualification type to update. // // QualificationTypeId is a required field QualificationTypeId *string `min:"1" type:"string" required:"true"` // The new status of the Qualification type - Active | Inactive QualificationTypeStatus *string `type:"string" enum:"QualificationTypeStatus"` // The amount of time, in seconds, that Workers must wait after requesting a // Qualification of the specified Qualification type before they can retry the // Qualification request. It is not possible to disable retries for a Qualification // type after it has been created with retries enabled. If you want to disable // retries, you must dispose of the existing retry-enabled Qualification type // using DisposeQualificationType and then create a new Qualification type with // retries disabled using CreateQualificationType. 
RetryDelayInSeconds *int64 `type:"long"` // The questions for the Qualification test a Worker must answer correctly to // obtain a Qualification of this type. If this parameter is specified, TestDurationInSeconds // must also be specified. // // Constraints: Must not be longer than 65535 bytes. Must be a QuestionForm // data structure. This parameter cannot be specified if AutoGranted is true. // // Constraints: None. If not specified, the Worker may request the Qualification // without answering any questions. Test *string `type:"string"` // The number of seconds the Worker has to complete the Qualification test, // starting from the time the Worker requests the Qualification. TestDurationInSeconds *int64 `type:"long"` } // String returns the string representation func (s UpdateQualificationTypeInput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateQualificationTypeInput) GoString() string { return s.String() } // Validate inspects the fields of the type to determine if they are valid. func (s *UpdateQualificationTypeInput) Validate() error { invalidParams := request.ErrInvalidParams{Context: "UpdateQualificationTypeInput"} if s.QualificationTypeId == nil { invalidParams.Add(request.NewErrParamRequired("QualificationTypeId")) } if s.QualificationTypeId != nil && len(*s.QualificationTypeId) < 1 { invalidParams.Add(request.NewErrParamMinLen("QualificationTypeId", 1)) } if invalidParams.Len() > 0 { return invalidParams } return nil } // SetAnswerKey sets the AnswerKey field's value. func (s *UpdateQualificationTypeInput) SetAnswerKey(v string) *UpdateQualificationTypeInput { s.AnswerKey = &v return s } // SetAutoGranted sets the AutoGranted field's value. func (s *UpdateQualificationTypeInput) SetAutoGranted(v bool) *UpdateQualificationTypeInput { s.AutoGranted = &v return s } // SetAutoGrantedValue sets the AutoGrantedValue field's value. func (s *UpdateQualificationTypeInput) SetAutoGrantedValue(v int64) *UpdateQualificationTypeInput { s.AutoGrantedValue = &v return s } // SetDescription sets the Description field's value. func (s *UpdateQualificationTypeInput) SetDescription(v string) *UpdateQualificationTypeInput { s.Description = &v return s } // SetQualificationTypeId sets the QualificationTypeId field's value. func (s *UpdateQualificationTypeInput) SetQualificationTypeId(v string) *UpdateQualificationTypeInput { s.QualificationTypeId = &v return s } // SetQualificationTypeStatus sets the QualificationTypeStatus field's value. func (s *UpdateQualificationTypeInput) SetQualificationTypeStatus(v string) *UpdateQualificationTypeInput { s.QualificationTypeStatus = &v return s } // SetRetryDelayInSeconds sets the RetryDelayInSeconds field's value. func (s *UpdateQualificationTypeInput) SetRetryDelayInSeconds(v int64) *UpdateQualificationTypeInput { s.RetryDelayInSeconds = &v return s } // SetTest sets the Test field's value. func (s *UpdateQualificationTypeInput) SetTest(v string) *UpdateQualificationTypeInput { s.Test = &v return s } // SetTestDurationInSeconds sets the TestDurationInSeconds field's value. func (s *UpdateQualificationTypeInput) SetTestDurationInSeconds(v int64) *UpdateQualificationTypeInput { s.TestDurationInSeconds = &v return s } type UpdateQualificationTypeOutput struct { _ struct{} `type:"structure"` // Contains a QualificationType data structure. 
QualificationType *QualificationType `type:"structure"` } // String returns the string representation func (s UpdateQualificationTypeOutput) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s UpdateQualificationTypeOutput) GoString() string { return s.String() } // SetQualificationType sets the QualificationType field's value. func (s *UpdateQualificationTypeOutput) SetQualificationType(v *QualificationType) *UpdateQualificationTypeOutput { s.QualificationType = v return s } // The WorkerBlock data structure represents a Worker who has been blocked. // It has two elements: the WorkerId and the Reason for the block. type WorkerBlock struct { _ struct{} `type:"structure"` // A message explaining the reason the Worker was blocked. Reason *string `type:"string"` // The ID of the Worker who accepted the HIT. WorkerId *string `min:"1" type:"string"` } // String returns the string representation func (s WorkerBlock) String() string { return awsutil.Prettify(s) } // GoString returns the string representation func (s WorkerBlock) GoString() string { return s.String() } // SetReason sets the Reason field's value. func (s *WorkerBlock) SetReason(v string) *WorkerBlock { s.Reason = &v return s } // SetWorkerId sets the WorkerId field's value. func (s *WorkerBlock) SetWorkerId(v string) *WorkerBlock { s.WorkerId = &v return s } const ( // AssignmentStatusSubmitted is a AssignmentStatus enum value AssignmentStatusSubmitted = "Submitted" // AssignmentStatusApproved is a AssignmentStatus enum value AssignmentStatusApproved = "Approved" // AssignmentStatusRejected is a AssignmentStatus enum value AssignmentStatusRejected = "Rejected" ) // AssignmentStatus_Values returns all elements of the AssignmentStatus enum func AssignmentStatus_Values() []string {<|fim▁hole|> AssignmentStatusApproved, AssignmentStatusRejected, } } const ( // ComparatorLessThan is a Comparator enum value ComparatorLessThan = "LessThan" // ComparatorLessThanOrEqualTo is a Comparator enum value ComparatorLessThanOrEqualTo = "LessThanOrEqualTo" // ComparatorGreaterThan is a Comparator enum value ComparatorGreaterThan = "GreaterThan" // ComparatorGreaterThanOrEqualTo is a Comparator enum value ComparatorGreaterThanOrEqualTo = "GreaterThanOrEqualTo" // ComparatorEqualTo is a Comparator enum value ComparatorEqualTo = "EqualTo" // ComparatorNotEqualTo is a Comparator enum value ComparatorNotEqualTo = "NotEqualTo" // ComparatorExists is a Comparator enum value ComparatorExists = "Exists" // ComparatorDoesNotExist is a Comparator enum value ComparatorDoesNotExist = "DoesNotExist" // ComparatorIn is a Comparator enum value ComparatorIn = "In" // ComparatorNotIn is a Comparator enum value ComparatorNotIn = "NotIn" ) // Comparator_Values returns all elements of the Comparator enum func Comparator_Values() []string { return []string{ ComparatorLessThan, ComparatorLessThanOrEqualTo, ComparatorGreaterThan, ComparatorGreaterThanOrEqualTo, ComparatorEqualTo, ComparatorNotEqualTo, ComparatorExists, ComparatorDoesNotExist, ComparatorIn, ComparatorNotIn, } } const ( // EventTypeAssignmentAccepted is a EventType enum value EventTypeAssignmentAccepted = "AssignmentAccepted" // EventTypeAssignmentAbandoned is a EventType enum value EventTypeAssignmentAbandoned = "AssignmentAbandoned" // EventTypeAssignmentReturned is a EventType enum value EventTypeAssignmentReturned = "AssignmentReturned" // EventTypeAssignmentSubmitted is a EventType enum value EventTypeAssignmentSubmitted = "AssignmentSubmitted" // 
EventTypeAssignmentRejected is a EventType enum value EventTypeAssignmentRejected = "AssignmentRejected" // EventTypeAssignmentApproved is a EventType enum value EventTypeAssignmentApproved = "AssignmentApproved" // EventTypeHitcreated is a EventType enum value EventTypeHitcreated = "HITCreated" // EventTypeHitexpired is a EventType enum value EventTypeHitexpired = "HITExpired" // EventTypeHitreviewable is a EventType enum value EventTypeHitreviewable = "HITReviewable" // EventTypeHitextended is a EventType enum value EventTypeHitextended = "HITExtended" // EventTypeHitdisposed is a EventType enum value EventTypeHitdisposed = "HITDisposed" // EventTypePing is a EventType enum value EventTypePing = "Ping" ) // EventType_Values returns all elements of the EventType enum func EventType_Values() []string { return []string{ EventTypeAssignmentAccepted, EventTypeAssignmentAbandoned, EventTypeAssignmentReturned, EventTypeAssignmentSubmitted, EventTypeAssignmentRejected, EventTypeAssignmentApproved, EventTypeHitcreated, EventTypeHitexpired, EventTypeHitreviewable, EventTypeHitextended, EventTypeHitdisposed, EventTypePing, } } const ( // HITAccessActionsAccept is a HITAccessActions enum value HITAccessActionsAccept = "Accept" // HITAccessActionsPreviewAndAccept is a HITAccessActions enum value HITAccessActionsPreviewAndAccept = "PreviewAndAccept" // HITAccessActionsDiscoverPreviewAndAccept is a HITAccessActions enum value HITAccessActionsDiscoverPreviewAndAccept = "DiscoverPreviewAndAccept" ) // HITAccessActions_Values returns all elements of the HITAccessActions enum func HITAccessActions_Values() []string { return []string{ HITAccessActionsAccept, HITAccessActionsPreviewAndAccept, HITAccessActionsDiscoverPreviewAndAccept, } } const ( // HITReviewStatusNotReviewed is a HITReviewStatus enum value HITReviewStatusNotReviewed = "NotReviewed" // HITReviewStatusMarkedForReview is a HITReviewStatus enum value HITReviewStatusMarkedForReview = "MarkedForReview" // HITReviewStatusReviewedAppropriate is a HITReviewStatus enum value HITReviewStatusReviewedAppropriate = "ReviewedAppropriate" // HITReviewStatusReviewedInappropriate is a HITReviewStatus enum value HITReviewStatusReviewedInappropriate = "ReviewedInappropriate" ) // HITReviewStatus_Values returns all elements of the HITReviewStatus enum func HITReviewStatus_Values() []string { return []string{ HITReviewStatusNotReviewed, HITReviewStatusMarkedForReview, HITReviewStatusReviewedAppropriate, HITReviewStatusReviewedInappropriate, } } const ( // HITStatusAssignable is a HITStatus enum value HITStatusAssignable = "Assignable" // HITStatusUnassignable is a HITStatus enum value HITStatusUnassignable = "Unassignable" // HITStatusReviewable is a HITStatus enum value HITStatusReviewable = "Reviewable" // HITStatusReviewing is a HITStatus enum value HITStatusReviewing = "Reviewing" // HITStatusDisposed is a HITStatus enum value HITStatusDisposed = "Disposed" ) // HITStatus_Values returns all elements of the HITStatus enum func HITStatus_Values() []string { return []string{ HITStatusAssignable, HITStatusUnassignable, HITStatusReviewable, HITStatusReviewing, HITStatusDisposed, } } const ( // NotificationTransportEmail is a NotificationTransport enum value NotificationTransportEmail = "Email" // NotificationTransportSqs is a NotificationTransport enum value NotificationTransportSqs = "SQS" // NotificationTransportSns is a NotificationTransport enum value NotificationTransportSns = "SNS" ) // NotificationTransport_Values returns all elements of the 
NotificationTransport enum func NotificationTransport_Values() []string { return []string{ NotificationTransportEmail, NotificationTransportSqs, NotificationTransportSns, } } const ( // NotifyWorkersFailureCodeSoftFailure is a NotifyWorkersFailureCode enum value NotifyWorkersFailureCodeSoftFailure = "SoftFailure" // NotifyWorkersFailureCodeHardFailure is a NotifyWorkersFailureCode enum value NotifyWorkersFailureCodeHardFailure = "HardFailure" ) // NotifyWorkersFailureCode_Values returns all elements of the NotifyWorkersFailureCode enum func NotifyWorkersFailureCode_Values() []string { return []string{ NotifyWorkersFailureCodeSoftFailure, NotifyWorkersFailureCodeHardFailure, } } const ( // QualificationStatusGranted is a QualificationStatus enum value QualificationStatusGranted = "Granted" // QualificationStatusRevoked is a QualificationStatus enum value QualificationStatusRevoked = "Revoked" ) // QualificationStatus_Values returns all elements of the QualificationStatus enum func QualificationStatus_Values() []string { return []string{ QualificationStatusGranted, QualificationStatusRevoked, } } const ( // QualificationTypeStatusActive is a QualificationTypeStatus enum value QualificationTypeStatusActive = "Active" // QualificationTypeStatusInactive is a QualificationTypeStatus enum value QualificationTypeStatusInactive = "Inactive" ) // QualificationTypeStatus_Values returns all elements of the QualificationTypeStatus enum func QualificationTypeStatus_Values() []string { return []string{ QualificationTypeStatusActive, QualificationTypeStatusInactive, } } const ( // ReviewActionStatusIntended is a ReviewActionStatus enum value ReviewActionStatusIntended = "Intended" // ReviewActionStatusSucceeded is a ReviewActionStatus enum value ReviewActionStatusSucceeded = "Succeeded" // ReviewActionStatusFailed is a ReviewActionStatus enum value ReviewActionStatusFailed = "Failed" // ReviewActionStatusCancelled is a ReviewActionStatus enum value ReviewActionStatusCancelled = "Cancelled" ) // ReviewActionStatus_Values returns all elements of the ReviewActionStatus enum func ReviewActionStatus_Values() []string { return []string{ ReviewActionStatusIntended, ReviewActionStatusSucceeded, ReviewActionStatusFailed, ReviewActionStatusCancelled, } } const ( // ReviewPolicyLevelAssignment is a ReviewPolicyLevel enum value ReviewPolicyLevelAssignment = "Assignment" // ReviewPolicyLevelHit is a ReviewPolicyLevel enum value ReviewPolicyLevelHit = "HIT" ) // ReviewPolicyLevel_Values returns all elements of the ReviewPolicyLevel enum func ReviewPolicyLevel_Values() []string { return []string{ ReviewPolicyLevelAssignment, ReviewPolicyLevelHit, } } const ( // ReviewableHITStatusReviewable is a ReviewableHITStatus enum value ReviewableHITStatusReviewable = "Reviewable" // ReviewableHITStatusReviewing is a ReviewableHITStatus enum value ReviewableHITStatusReviewing = "Reviewing" ) // ReviewableHITStatus_Values returns all elements of the ReviewableHITStatus enum func ReviewableHITStatus_Values() []string { return []string{ ReviewableHITStatusReviewable, ReviewableHITStatusReviewing, } }<|fim▁end|>
return []string{ AssignmentStatusSubmitted,
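// Illustrative sketch, not taken from the generated service definitions above:
// it shows one plausible way the chained setters and the Validate method that
// SendBonusInput defines earlier in this file can be combined. Every literal
// value below is a placeholder assumption, not a real ID or amount.
func buildExampleSendBonusInput() (*SendBonusInput, error) {
	input := (&SendBonusInput{}).
		SetWorkerId("AEXAMPLEWORKERID").         // placeholder Worker ID
		SetAssignmentId("3EXAMPLEASSIGNMENTID"). // placeholder Assignment ID
		SetBonusAmount("5").                     // "5" means $5.00 USD per the field docs
		SetReason("Thank you for the careful work on this HIT.").
		SetUniqueRequestToken("bonus-request-001") // lets the call be retried safely on timeouts

	// Validate applies the client-side constraints declared in the struct tags
	// (required fields and minimum lengths) before any request would be sent.
	if err := input.Validate(); err != nil {
		return nil, err
	}
	return input, nil
}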
<|file_name|>0002_auto_20170314_1712.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-03-14 17:12 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('volunteers', '0001_initial'), ] <|fim▁hole|> new_name='avatar', ), ]<|fim▁end|>
operations = [ migrations.RenameField( model_name='volunteer', old_name='picture',
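# Illustrative sketch, assembled only from the row above: joining the text that
# precedes the fim-hole marker with this completion yields a migration
# equivalent to the following. App, model, and field names all come from the
# original row; nothing new is assumed beyond the standard Django import.
from django.db import migrations


class Migration(migrations.Migration):
    # Renames Volunteer.picture to Volunteer.avatar (reconstruction sketch).

    dependencies = [
        ('volunteers', '0001_initial'),
    ]

    operations = [
        migrations.RenameField(
            model_name='volunteer',
            old_name='picture',
            new_name='avatar',
        ),
    ]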
<|file_name|>test_parse.py<|end_file_name|><|fim▁begin|>import asterix import unittest class AsterixParseTest(unittest.TestCase): def test_ParseCAT048(self): sample_filename = asterix.get_sample_file('cat048.raw') with open(sample_filename, "rb") as f: data = f.read() packet = asterix.parse(data) self.maxDiff = None self.assertIsNotNone(packet) self.assertIsNotNone(packet[0]) self.assertIs(len(packet), 1) self.assertTrue('I220' in packet[0]) self.assertEqual(packet[0]['category'], 48) self.assertEqual(packet[0]['len'], 45) self.assertEqual(packet[0]['crc'], 'C150ED0E') self.assertTrue('ts' in packet[0]) self.assertEqual(packet[0]['I220']['ACAddr']['val'], '3C660C') self.assertEqual(packet[0]['I220']['ACAddr']['desc'], 'AircraftAddress') self.assertEqual(packet[0]['I010'], {'SAC': {'desc': 'System Area Code', 'val': 25}, 'SIC': {'desc': 'System Identification Code', 'val': 201}}) self.assertEqual(packet[0]['I170'], {'GHO': {'desc': 'GHO', 'val': 0, 'meaning': 'True target track'}, 'TCC': {'desc': 'TCC', 'val': 0, 'meaning': 'Radar plane'}, 'RAD': {'desc': 'RAD', 'val': 2, 'meaning': 'SSR/ModeS Track'}, 'spare': {'desc': 'spare bits set to 0', 'const': 0, 'val': 0}, 'TRE': {'desc': 'TRE', 'val': 0, 'meaning': 'Track still alive'}, 'CDM': {'desc': 'CDM', 'val': 0, 'meaning': 'Maintaining'}, 'CNF': {'desc': 'CNF', 'val': 0, 'meaning': 'Confirmed Track'}, 'SUP': {'desc': 'SUP', 'val': 0, 'meaning': 'Track from cluster network - NO'}, 'FX': {'desc': 'FX', 'val': 0, 'meaning': 'End of Data Item'}, 'DOU': {'desc': 'DOU', 'val': 0, 'meaning': 'Normal confidence'}, 'MAH': {'desc': 'MAH', 'val': 0, 'meaning': 'No horizontal man. sensed'}}) self.assertEqual(packet[0]['I200'], {'CGS': {'desc': 'Calculated groundspeed', 'val': 434.94}, 'CHdg': {'desc': 'Calculated heading', 'val': 124.002685546875}}) self.assertEqual(packet[0]['I220'], {'ACAddr': {'desc': 'AircraftAddress', 'val': '3C660C'}}) self.assertEqual(packet[0]['I250'][0], {'TARGET_ALT_STATUS': {'desc': 'Status of Target ALT source bits', 'meaning': 'No source information provided', 'val': 0}, 'res': {'desc': 'Reserved', 'val': 0}, 'FMS_ALT': {'desc': 'FMS Selected Altitude', 'val': 0.0}, 'APP': {'desc': 'APPROACH Mode', 'meaning': 'Not active', 'val': 0}, 'ALT_HOLD': {'desc': 'ALT HOLD Mode', 'meaning': 'Not active', 'val': 0}, 'TARGET_ALT_SOURCE': {'desc': 'Target ALT source', 'meaning': 'Unknown', 'val': 0}, 'BDS': {'desc': 'BDS register', 'val': '40'}, 'FMS_ALT_STATUS': {'desc': 'FMS Altitude Status', 'val': 0}, 'BP_STATUS': {'desc': 'Barometric Pressure Status', 'val': 1}, 'BP': {'desc': 'Barometric Pressure', 'val': 227.0}, 'MODE_STATUS': {'desc': 'Status of MCP/FCU Mode Bits', 'val': 0}, 'VNAV': {'desc': 'VNAV Mode', 'meaning': 'Not active', 'val': 0}, 'MCP_ALT_STATUS': {'desc': 'MCP Altitude Status', 'val': 1}, 'MCP_ALT': {'desc': 'MCP/FCU Selected Altitude', 'val': 33008.0}}) self.assertEqual(packet[0]['I040'], {'THETA': {'desc': '', 'val': 340.13671875}, 'RHO': {'desc': '', 'max': 256.0, 'val': 197.68359375}}) self.assertEqual(packet[0]['I240'], {'TId': {'desc': 'Characters 1-8 (coded on 6 bits each) defining target identification', 'val': 'DLH65A '}}) self.assertEqual(packet[0]['I140'], {'ToD': {'desc': 'Time Of Day', 'val': 27354.6015625}}) self.assertEqual(packet[0]['I070'], {'Mode3A': {'desc': 'Mode-3/A reply code', 'val': '1000'}, 'V': {'desc': '', 'val': 0, 'meaning': 'Code validated'}, 'L': {'desc': '', 'val': 0, 'meaning': 'Mode-3/A code as derived from the reply of the transponder'}, 'spare': {'desc': 'spare bit set to 0', 
'const': 0, 'val': 0}, 'G': {'desc': '', 'val': 0, 'meaning': 'Default'}}) self.assertEqual(packet[0]['I161'], {'Tn': {'desc': 'Track Number', 'val': 3563}}) self.assertEqual(packet[0]['I020'], {'SIM': {'desc': 'SIM', 'val': 0, 'meaning': 'Actual target report'}, 'TYP': {'desc': 'TYP', 'val': 5, 'meaning': 'Single ModeS Roll-Call'}, 'RAB': {'desc': 'RAB', 'val': 0, 'meaning': 'Report from aircraft transponder'}, 'RDP': {'desc': 'RDP', 'val': 0, 'meaning': 'Report from RDP Chain 1'}, 'FX': {'desc': 'FX', 'val': 0, 'meaning': 'End of Data Item'}, 'SPI': {'desc': 'SPI', 'val': 0, 'meaning': 'Absence of SPI'}}) self.assertEqual(packet[0]['I090'], {'V': {'desc': '', 'val': 0, 'meaning': 'Code validated'}, 'FL': {'desc': 'FlightLevel', 'val': 330.0}, 'G': {'desc': '', 'val': 0, 'meaning': 'Default'}}) self.assertEqual(packet[0]['I230'], {'COM': {'desc': 'COM', 'val': 1}, 'BDS37': {'desc': 'BDS 1,0 bits 37/40', 'val': 5}, 'ModeSSSC': {'desc': 'ModeS Specific Service Capability', 'val': 1, 'meaning': 'Yes'}, 'STAT': {'desc': 'STAT', 'val': 0}, 'AIC': {'desc': 'Aircraft identification capability', 'val': 1, 'meaning': 'Yes'}, 'BDS16': {'desc': 'BDS 1,0 bit 16', 'val': 1}, 'spare': {'desc': 'spare bit set to 0', 'const': 0, 'val': 0}, 'ARC': {'desc': 'Altitude reporting capability', 'val': 1, 'meaning': '25ft resolution'}, 'SI': {'desc': 'SI/II Transponder Capability', 'val': 0, 'meaning': 'SI-Code Capable'}}) def test_ParseCAT062CAT065(self): sample_filename = asterix.get_sample_file('cat062cat065.raw') with open(sample_filename, "rb") as f: data = f.read() packet = asterix.parse(data) self.assertIsNotNone(packet) self.assertIsNotNone(packet[0]) self.assertIs(len(data), 195) self.assertIs(len(packet), 3) self.assertIs(packet[0]['category'], 62) self.assertIs(packet[0]['len'], 66) self.assertEqual(packet[0]['crc'], '9CB473BE') self.assertIs(packet[1]['category'], 62) self.assertIs(packet[1]['len'], 114) self.assertEqual(packet[1]['crc'], '5A6E1F96') self.assertIs(packet[2]['category'], 65) self.assertIs(packet[2]['len'], 9) self.assertEqual(packet[2]['crc'], '8B7DA47A') self.assertEqual(packet[0]['I220'], {'RoC': {'val': -443.75, 'desc': 'Rate of Climb/Descent'}}) self.assertEqual(packet[0]['I015'], {'SID': {'val': 4, 'desc': 'Service Identification'}}) self.assertEqual(packet[0]['I290']['MDS'], {'MDS': {'val': 63.75, 'desc': 'Age of the last Mode S detection used to update the track'}}) self.assertEqual(packet[0]['I290']['PSR'], {'PSR': {'val': 7.25, 'desc': 'Age of the last primary detection used to update the track'}}) self.assertEqual(packet[0]['I290']['SSR'], {'SSR': {'val': 0.0, 'desc': 'Age of the last secondary detection used to update the track'}}) self.assertEqual(packet[0]['I135'], { 'QNH': {'meaning': 'No QNH correction applied', 'val': 0, 'desc': 'QNH'}, 'CTBA': {'max': 150000.0, 'min': -1500.0, 'val': 15700.0, 'desc': 'Calculated Track Barometric Alt'}}) self.assertEqual(packet[0]['I136'], { 'MFL': {'max': 150000.0, 'min': -1500.0, 'val': 15700.0, 'desc': 'Measured Flight Level'}}) self.assertEqual(packet[0]['I185'], {'Vx': {'max': 8191.75, 'min': -8192.0, 'val': -51.25, 'desc': 'Vx'}, 'Vy': {'max': 8191.75, 'min': -8192.0, 'val': 170.0, 'desc': 'Vy'}}) self.assertEqual(packet[0]['I080'], {'STP': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'MD5': {'meaning': 'No Mode 5 interrogation', 'val': 0, 'desc': ''}, 'FPC': {'meaning': 'Not flight-plan correlated', 'val': 0, 'desc': ''}, 'AMA': {'meaning': 'track not resulting from amalgamation process', 'val': 0, 'desc': ''}, 'CNF': 
{'meaning': 'Confirmed track', 'val': 0, 'desc': ''}, 'TSE': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'ME': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'FX': {'meaning': 'End of data item', 'val': 0, 'desc': ''}, 'CST': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'PSR': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MDS': { 'meaning': 'Age of the last received Mode S track update is higher than system dependent threshold', 'val': 1, 'desc': ''}, 'MI': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'SRC': {'meaning': 'height from coverage', 'val': 4, 'desc': 'Source of calculated track altitude for I062/130'}, 'SIM': {'meaning': 'Actual track', 'val': 0, 'desc': ''}, 'KOS': {'meaning': 'Background service used', 'val': 1, 'desc': ''}, 'AFF': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'MRH': {'meaning': 'Barometric altitude (Mode C) more reliable', 'val': 0, 'desc': 'Most Reliable Height'}, 'MON': {'meaning': 'Multisensor track', 'val': 0, 'desc': ''}, 'TSB': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'SUC': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MD4': {'meaning': 'No Mode 4 interrogation', 'val': 0, 'desc': ''}, 'SPI': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'ADS': { 'meaning': 'Age of the last received ADS-B track update is higher than system dependent threshold', 'val': 1, 'desc': ''}, 'AAC': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'SSR': {'meaning': 'Default value', 'val': 0, 'desc': ''}}) self.assertEqual(packet[0]['I070'], {'ToT': {'val': 30911.6640625, 'desc': 'Time Of Track Information'}}) self.assertEqual(packet[0]['I100'], {'Y': {'val': -106114.0, 'desc': 'Y'}, 'X': {'val': -239083.0, 'desc': 'X'}}) self.assertEqual(packet[0]['I200'], {'VERTA': {'meaning': 'Descent', 'val': 2, 'desc': 'Vertical Rate'}, 'spare': {'const': 0, 'val': 0, 'desc': 'Spare bit set to zero'}, 'LONGA': {'meaning': 'Decreasing Groundspeed', 'val': 2, 'desc': 'Longitudinal Acceleration'}, 'TRANSA': {'meaning': 'Constant Course', 'val': 0, 'desc': 'Transversal Acceleration'}, 'ADF': {'meaning': 'No altitude discrepancy', 'val': 0, 'desc': 'Altitude Discrepancy Flag'}}) self.assertEqual(packet[0]['I130'], { 'Alt': {'max': 150000.0, 'min': -1500.0, 'val': 43300.0, 'desc': 'Altitude'}}) self.assertEqual(packet[0]['I060'], {'CH': {'meaning': 'No Change', 'val': 0, 'desc': 'Change in Mode 3/A'}, 'spare': {'const': 0, 'val': 0, 'desc': 'Spare bits set to 0'}, 'Mode3A': {'val': '4276', 'desc': 'Mode-3/A reply in octal representation'}}) self.assertEqual(packet[0]['I295']['MDA'], {'MDA': {'val': 0, 'desc': ''}}) self.assertEqual(packet[0]['I295']['MFL'], {'MFL': {'val': 0.0, 'desc': ''}}) self.assertEqual(packet[0]['I010'], {'SAC': {'val': 25, 'desc': 'System Area Code'}, 'SIC': {'val': 100, 'desc': 'System Identification Code'}}) self.assertEqual(packet[0]['I340']['TYP'], { 'TYP': {'val': 2, 'meaning': 'Single SSR detection', 'desc': 'Report Type'}, 'TST': {'val': 0, 'meaning': 'Real target report', 'desc': ''}, 'spare': {'val': 0, 'desc': 'Spare bits set to zero', 'const': 0}, 'RAB': {'val': 0, 'meaning': 'Report from aircraft transponder', 'desc': ''}, 'SIM': {'val': 0, 'meaning': 'Actual target report', 'desc': ''}}) self.assertEqual(packet[0]['I340']['SID'], { 'SAC': {'val': 25, 'desc': 'System Area Code'}, 'SIC': {'val': 13, 'desc': 'System Identification Code'}}) self.assertEqual(packet[0]['I340']['MDC'], { 'CG': {'val': 0, 'meaning': 'Default', 'desc': ''}, 'CV': {'val': 0, 'meaning': 'Code validated', 'desc': ''}, 
'ModeC': {'max': 1270.0, 'val': 157.0, 'min': -12.0, 'desc': 'Last Measured Mode C Code'}}) self.assertEqual(packet[0]['I340']['MDA'], { 'L': {'val': 0, 'meaning': 'MODE 3/A code as derived from the reply of the transponder', 'desc': ''}, 'V': {'val': 0, 'meaning': 'Code validated', 'desc': ''}, 'Mode3A': {'val': '4276', 'desc': 'Mode 3/A reply under the form of 4 digits in octal representation'}, 'G': {'val': 0, 'meaning': 'Default', 'desc': ''}, 'spare': {'val': 0, 'desc': 'Spare bit set to zero', 'const': 0}}) self.assertEqual(packet[0]['I340']['POS'], { 'RHO': {'max': 256.0, 'val': 186.6875, 'desc': 'Measured distance'}, 'THETA': {'val': 259.453125, 'desc': 'Measured azimuth'}}) self.assertEqual(packet[0]['I105'], { 'Lat': {'val': 44.73441302776337, 'desc': 'Latitude in WGS.84 in twos complement. Range -90 < latitude < 90 deg.'}, 'Lon': {'val': 13.0415278673172, 'desc': 'Longitude in WGS.84 in twos complement. Range -180 < longitude < 180 deg.'}}) self.assertEqual(packet[0]['I040'], {'TrkN': {'val': 4980, 'desc': 'Track number'}}) self.assertEqual(packet[0]['I210'], {'Ax': {'val': 0.0, 'desc': 'Ax'}, 'Ay': {'val': 0.0, 'desc': 'Ay'}}) self.assertEqual(packet[1]['I220'], {'RoC': {'val': 0.0, 'desc': 'Rate of Climb/Descent'}}) self.assertEqual(packet[1]['I015'], {'SID': {'val': 4, 'desc': 'Service Identification'}}) self.assertEqual(packet[1]['I290']['MDS'], {'MDS': {'val': 0.0, 'desc': 'Age of the last Mode S detection used to update the track'}}) self.assertEqual(packet[1]['I290']['SSR'], {'SSR': {'val': 0.0, 'desc': 'Age of the last secondary detection used to update the track'}}) self.assertEqual(packet[1]['I135'], { 'QNH': {'meaning': 'No QNH correction applied', 'val': 0, 'desc': 'QNH'}, 'CTBA': {'max': 150000.0, 'min': -1500.0, 'val': 35000.0, 'desc': 'Calculated Track Barometric Alt'}}) self.assertEqual(packet[1]['I136'], { 'MFL': {'max': 150000.0, 'min': -1500.0, 'val': 35000.0, 'desc': 'Measured Flight Level'}}) self.assertEqual(packet[1]['I185'], {'Vx': {'max': 8191.75, 'min': -8192.0, 'val': 141.5, 'desc': 'Vx'}, 'Vy': {'max': 8191.75, 'min': -8192.0, 'val': -170.75, 'desc': 'Vy'}}) self.assertEqual(packet[1]['I080'], {'STP': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'MD5': {'meaning': 'No Mode 5 interrogation', 'val': 0, 'desc': ''}, 'FPC': {'meaning': 'Flight plan correlated', 'val': 1, 'desc': ''}, 'AMA': {'meaning': 'track not resulting from amalgamation process', 'val': 0, 'desc': ''}, 'CNF': {'meaning': 'Confirmed track', 'val': 0, 'desc': ''}, 'TSE': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'ME': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'FX': {'meaning': 'End of data item', 'val': 0, 'desc': ''}, 'CST': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'PSR': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MDS': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MI': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'SRC': {'meaning': 'triangulation', 'val': 3, 'desc': 'Source of calculated track altitude for I062/130'}, 'SIM': {'meaning': 'Actual track', 'val': 0, 'desc': ''}, 'KOS': {'meaning': 'Background service used', 'val': 1, 'desc': ''}, 'AFF': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'MRH': {'meaning': 'Barometric altitude (Mode C) more reliable', 'val': 0, 'desc': 'Most Reliable Height'}, 'MON': {'meaning': 'Multisensor track', 'val': 0, 'desc': ''}, 'TSB': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'SUC': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'MD4': {'meaning': 'No Mode 4 
interrogation', 'val': 0, 'desc': ''}, 'SPI': {'meaning': 'default value', 'val': 0, 'desc': ''}, 'ADS': { 'meaning': 'Age of the last received ADS-B track update is higher than system dependent threshold', 'val': 1, 'desc': ''}, 'AAC': {'meaning': 'Default value', 'val': 0, 'desc': ''}, 'SSR': {'meaning': 'Default value', 'val': 0, 'desc': ''}}) self.assertEqual(packet[1]['I070'], {'ToT': {'val': 30911.828125, 'desc': 'Time Of Track Information'}}) self.assertEqual(packet[1]['I100'], {'Y': {'val': -36106.5, 'desc': 'Y'}, 'X': {'val': -72564.5, 'desc': 'X'}}) self.assertEqual(packet[1]['I200'], {'VERTA': {'meaning': 'Level', 'val': 0, 'desc': 'Vertical Rate'}, 'spare': {'const': 0, 'val': 0, 'desc': 'Spare bit set to zero'}, 'LONGA': {'meaning': 'Constant Groundspeed', 'val': 0, 'desc': 'Longitudinal Acceleration'}, 'TRANSA': {'meaning': 'Constant Course', 'val': 0, 'desc': 'Transversal Acceleration'}, 'ADF': {'meaning': 'No altitude discrepancy', 'val': 0, 'desc': 'Altitude Discrepancy Flag'}}) self.assertEqual(packet[1]['I130'], { 'Alt': {'max': 150000.0, 'min': -1500.0, 'val': 35312.5, 'desc': 'Altitude'}}) self.assertEqual(packet[1]['I060'], {'CH': {'meaning': 'No Change', 'val': 0, 'desc': 'Change in Mode 3/A'}, 'spare': {'const': 0, 'val': 0, 'desc': 'Spare bits set to 0'}, 'Mode3A': {'val': '2535',<|fim▁hole|> self.assertEqual(packet[1]['I390']['DEP'], {'DEP': {'desc': 'Departure Airport', 'val': 'EDDL'}}) self.assertEqual(packet[1]['I390']['TAC'], {'TYPE': {'desc': 'Type of Aircraft', 'val': 'B738'}}) self.assertEqual(packet[1]['I390']['DST'],{'DES': {'desc': 'Destination Airport', 'val': 'HELX'}}) self.assertEqual(packet[1]['I390']['IFI'], { 'spare': {'const': 0, 'desc': 'spare bits set to zero', 'val': 0}, 'NBR': {'desc': '', 'val': 29233709}, 'TYP': {'meaning': 'Unit 1 internal flight number', 'desc': '', 'val': 1}}) self.assertEqual(packet[1]['I390']['RDS'], {'NU1': {'desc': 'First number', 'val': ' '}, 'LTR': {'desc': 'Letter', 'val': ' '}, 'NU2': {'desc': 'Second number', 'val': ' '}}) self.assertEqual(packet[1]['I390']['WTC'], {'WTC': {'desc': 'Wake Turbulence Category', 'val': 'M'}}) self.assertEqual(packet[1]['I390']['CSN'], {'CS': {'desc': 'Callsign', 'val': 'SXD4723'}}) self.assertEqual(packet[1]['I390']['TAG'], { 'SIC': {'desc': 'System Identification Code', 'val': 100}, 'SAC': {'desc': 'System Area Code', 'val': 25}}) self.assertEqual(packet[1]['I390']['FCT'], { 'spare': {'const': 0, 'desc': 'spare bit set to zero', 'val': 0}, 'FR1FR2': {'meaning': 'Instrument Flight Rules', 'desc': '', 'val': 0}, 'RVSM': {'meaning': 'Approved', 'desc': '', 'val': 1}, 'GATOAT': {'meaning': 'General Air Traffic', 'desc': '', 'val': 1}, 'HPR': {'meaning': 'Normal Priority Flight', 'desc': '', 'val': 0}}) self.assertEqual(packet[1]['I390']['CFL'], {'CFL': {'desc': 'Current Cleared Flight Level', 'val': 350.0}}) self.assertEqual(packet[1]['I010'], {'SAC': {'val': 25, 'desc': 'System Area Code'}, 'SIC': {'val': 100, 'desc': 'System Identification Code'}}) self.assertEqual(packet[1]['I340']['TYP'], { 'TYP': {'desc': 'Report Type', 'meaning': 'Single ModeS Roll-Call', 'val': 5}, 'TST': {'desc': '', 'meaning': 'Real target report', 'val': 0}, 'SIM': {'desc': '', 'meaning': 'Actual target report', 'val': 0}, 'RAB': {'desc': '', 'meaning': 'Report from aircraft transponder', 'val': 0}, 'spare': {'desc': 'Spare bits set to zero', 'val': 0, 'const': 0}}) self.assertEqual(packet[1]['I340']['POS'], { 'RHO': {'desc': 'Measured distance', 'val': 93.1953125, 'max': 256.0}, 'THETA': {'desc': 'Measured 
azimuth', 'val': 271.4666748046875}}) self.assertEqual(packet[1]['I340']['MDA'], { 'G': {'desc': '', 'meaning': 'Default', 'val': 0}, 'L': {'desc': '', 'meaning': 'MODE 3/A code as derived from the reply of the transponder', 'val': 0}, 'V': {'desc': '', 'meaning': 'Code validated', 'val': 0}, 'Mode3A': {'desc': 'Mode 3/A reply under the form of 4 digits in octal representation', 'val': '2535'}, 'spare': {'desc': 'Spare bit set to zero', 'val': 0, 'const': 0}}) self.assertEqual(packet[1]['I340']['MDC'], {'ModeC': {'min': -12.0, 'desc': 'Last Measured Mode C Code', 'val': 350.0, 'max': 1270.0}, 'CG': {'desc': '', 'meaning': 'Default', 'val': 0}, 'CV': {'desc': '', 'meaning': 'Code validated', 'val': 0}}) self.assertEqual(packet[1]['I340']['SID'], { 'SIC': {'desc': 'System Identification Code', 'val': 13}, 'SAC': {'desc': 'System Area Code', 'val': 25}}) self.assertEqual(packet[1]['I380']['COM'], { 'COM': {'val': 1, 'meaning': 'Comm. A and Comm. B capability', 'desc': 'Communications capability of the transponder'}, 'SSC': {'val': 1, 'meaning': 'Yes', 'desc': 'Specific service capability'}, 'STAT': {'val': 0, 'meaning': 'No alert, no SPI, aircraft airborne', 'desc': 'Flight Status'}, 'spare': {'val': 0, 'const': 0, 'desc': 'Spare bits set to zero'}, 'B1A': {'val': 1, 'desc': 'BDS 1,0 bit 16'}, 'B1B': {'val': 6, 'desc': 'BDS 1,0 bits 37/40'}, 'ARC': {'val': 1, 'meaning': '25 ft resolution', 'desc': 'Altitude reporting capability'}, 'AIC': {'val': 1, 'meaning': 'Yes', 'desc': 'Aircraft identification capability'}}) self.assertEqual(packet[1]['I380']['ADR'], {'ADR': {'val': '3C0A55', 'desc': 'Target Address'}}) self.assertEqual(packet[1]['I380']['ID'], {'ACID': {'val': 'SXD4723 ', 'desc': 'Target Identification'}}) self.assertEqual(packet[1]['I105'], {'Lat': {'val': 45.40080785751343, 'desc': 'Latitude in WGS.84 in twos complement. Range -90 < latitude < 90 deg.'}, 'Lon': {'val': 15.13318419456482, 'desc': 'Longitude in WGS.84 in twos complement. Range -180 < longitude < 180 deg.'}}) self.assertEqual(packet[1]['I040'], {'TrkN': {'val': 7977, 'desc': 'Track number'}}) self.assertEqual(packet[1]['I210'], {'Ax': {'val': 0.0, 'desc': 'Ax'}, 'Ay': {'val': 0.0, 'desc': 'Ay'}}) self.assertEqual(packet[2]['I015'], {'SID': {'val': 4, 'desc': 'Service Identification'}}) self.assertEqual(packet[2]['I020'], {'BTN': {'val': 24, 'desc': 'Batch Number'}}) self.assertEqual(packet[2]['I010'], {'SAC': {'val': 25, 'desc': 'Source Area Code'}, 'SIC': {'val': 100, 'desc': 'Source Identification Code'}}) self.assertEqual(packet[2]['I030'], {'ToD': {'val': 30913.0546875, 'desc': 'Time Of Message'}}) self.assertEqual(packet[2]['I000'], {'Typ': {'meaning': 'End of Batch', 'val': 2, 'desc': 'Message Type'}}) def main(): unittest.main() if __name__ == '__main__': main()<|fim▁end|>
'desc': 'Mode-3/A reply in octal representation'}}) self.assertEqual(packet[1]['I295']['MFL'], {'MFL': {'val': 0.0, 'desc': ''}})
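# Illustrative sketch, not part of the test module above: a minimal decode loop
# using only the asterix calls the tests exercise (get_sample_file, parse) and
# the per-record keys they assert on ('category', 'len', 'crc'). The expected
# values quoted in the comment are the CAT048 figures from the first test case.
import asterix


def summarize_sample(name='cat048.raw'):
    sample_path = asterix.get_sample_file(name)   # bundled sample capture
    with open(sample_path, 'rb') as f:
        records = asterix.parse(f.read())         # -> list of decoded records
    # For cat048.raw the first record is category 48, 45 bytes, CRC 'C150ED0E'.
    return [(r['category'], r['len'], r['crc']) for r in records]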
<|file_name|>SimpleHttpRequest.java<|end_file_name|><|fim▁begin|>/* * Copyright 2015 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.client.http; import java.net.URI; import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.HttpHeaders; import io.netty.handler.codec.http.HttpMethod; /** * A container for information to send in an HTTP request. This is a simpler version of {@link FullHttpRequest} * which only uses a byte array to avoid callers having to worry about memory management. */ public class SimpleHttpRequest { private final URI uri; private final HttpMethod method; private final HttpHeaders headers; private final byte[] content; SimpleHttpRequest(URI uri, HttpMethod method, HttpHeaders headers, byte[] content) { this.uri = uri; this.method = method; this.headers = new ImmutableHttpHeaders(headers); this.content = content; } /** * Returns this request's URI. */ public URI uri() { return uri; } /** * Returns this request's HTTP method. */ public HttpMethod method() { return method; } /** * Returns this request's HTTP headers. */ public HttpHeaders headers() { return headers; } /** * Returns the length of this requests's content. */ public int contentLength() { return content.length; } /** * Reads this request's content into the destination buffer. */ public void readContent(byte[] dst, int offset, int length) { System.arraycopy(content, 0, dst, offset, length); } byte[] content() { return content; } @Override public String toString() { return toString(uri, method, headers, content); } static String toString(URI uri, HttpMethod method, HttpHeaders headers, byte[] content) { StringBuilder buf = new StringBuilder(); buf.append('('); buf.append("uri: ").append(uri); buf.append(", method: ").append(method); buf.append(", headers: ").append(headers); buf.append(", content: "); if (content.length > 0) { buf.append("<length: ").append(content.length).append('>'); } else {<|fim▁hole|> } buf.append(')'); return buf.toString(); } }<|fim▁end|>
buf.append("<none>");
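// Illustrative sketch, assumed to sit in the same package as the class above so
// no additional imports are needed: it shows how a caller can obtain a copy of
// the request body using only the public contentLength() and readContent(...)
// accessors defined by SimpleHttpRequest. The helper class name is hypothetical.
final class SimpleHttpRequestBodies {

    private SimpleHttpRequestBodies() {}

    /** Copies the full request body into a freshly allocated array. */
    static byte[] copyContent(SimpleHttpRequest request) {
        byte[] body = new byte[request.contentLength()];
        request.readContent(body, 0, body.length); // fills body[0..contentLength)
        return body;
    }
}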
<|file_name|>debugger_panel.py<|end_file_name|><|fim▁begin|>from __future__ import annotations from ..typecheck import * from ..import ui from ..import dap from . import css from .breakpoints_panel import BreakpointsPanel from .input_list_view import InputListView if TYPE_CHECKING: from ..debugger import Debugger class DebuggerPanel(ui.div): on_settings: Callable[[], Any] on_start: Callable[[], Any] on_stop: Callable[[], Any] on_pause: Callable[[], Any] on_continue: Callable[[], Any] on_step_over: Callable[[], Any] on_step_out: Callable[[], Any] on_step_in: Callable[[], Any] def __init__(self, debugger: Debugger, on_navigate_to_source: Callable[[dap.SourceLocation], None]) -> None: super().__init__() self.debugger = debugger self.breakpoints = BreakpointsPanel(debugger.breakpoints, on_navigate_to_source) self.debugger.on_session_state_updated.add(lambda session, state: self.dirty()) self.debugger.on_session_active.add(self.on_selected_session) self.debugger.on_session_added.add(self.on_selected_session) self.last_active_adapter = None def on_selected_session(self, session: dap.Session): self.last_active_adapter = session.adapter_configuration self.dirty() def render(self) -> ui.div.Children: items = [ DebuggerCommandButton(self.on_settings, ui.Images.shared.settings, 'Settings'), DebuggerCommandButton(self.on_start, ui.Images.shared.play, 'Start'), ] if self.debugger.is_stoppable(): items.append(DebuggerCommandButton(self.on_stop, ui.Images.shared.stop, 'Stop'))<|fim▁hole|> items.append(DebuggerCommandButton(self.on_stop, ui.Images.shared.stop_disable, 'Stop (Disabled)')) if self.debugger.is_running(): items.append(DebuggerCommandButton(self.on_pause, ui.Images.shared.pause, 'Pause')) elif self.debugger.is_paused(): items.append(DebuggerCommandButton(self.on_continue, ui.Images.shared.resume, 'Continue')) else: items.append(DebuggerCommandButton(self.on_pause, ui.Images.shared.pause_disable, 'Pause (Disabled)')) if self.debugger.is_paused(): items.extend([ DebuggerCommandButton(self.on_step_over, ui.Images.shared.down, 'Step Over'), DebuggerCommandButton(self.on_step_out, ui.Images.shared.left, 'Step Out'), DebuggerCommandButton(self.on_step_in, ui.Images.shared.right, 'Step In'), ]) else: items.extend([ DebuggerCommandButton(self.on_step_over, ui.Images.shared.down_disable, 'Step Over (Disabled)'), DebuggerCommandButton(self.on_step_out, ui.Images.shared.left_disable, 'Step Out (Disabled)'), DebuggerCommandButton(self.on_step_in, ui.Images.shared.right_disable, 'Step In (Disabled)'), ]) # looks like # current status # breakpoints ... 
if self.debugger.is_active: self.last_active_adapter = self.debugger.active.adapter_configuration or self.last_active_adapter panel_items: list[ui.div] = [] if self.debugger.is_active: session = self.debugger.active status = session.status if status: panel_items.append(ui.div(height=css.row_height)[ ui.text(status, css=css.label_secondary) ]) if self.last_active_adapter: settings = self.last_active_adapter.settings(self.debugger) for setting in settings: panel_items.append(InputListView(setting)) div = self.last_active_adapter.ui(self.debugger) if div: panel_items.append(div) panel_items.append(self.breakpoints) return [ ui.div()[ ui.div(height=css.header_height)[items], ui.div(width=30 - css.rounded_panel.padding_width, height=1000, css=css.rounded_panel)[ panel_items ], ] ] class DebuggerCommandButton (ui.span): def __init__(self, callback: Callable[[], Any], image: ui.Image, title: str) -> None: super().__init__() self.image = image self.callback = callback self.title = title def render(self) -> ui.span.Children: return [ ui.span(css=css.padding)[ ui.click(self.callback, title=self.title)[ ui.icon(self.image), ] ] ]<|fim▁end|>
else:
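# Illustrative sketch, not defined anywhere in the module above: it only
# restates the enabled/disabled branching that render() repeats for each
# command (active icon vs. its *_disable variant plus a "(Disabled)" title).
# The helper name `command_button` is hypothetical.
def command_button(enabled, callback, image, image_disabled, title):
    # Matches the pattern in DebuggerPanel.render(): the callback stays attached
    # even when the action is unavailable; only the icon and title change.
    if enabled:
        return DebuggerCommandButton(callback, image, title)
    return DebuggerCommandButton(callback, image_disabled, title + ' (Disabled)')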
<|file_name|>run_command_document_base.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class RunCommandDocumentBase(Model):<|fim▁hole|> :param id: The VM run command id. :type id: str :param os_type: The Operating System type. Possible values include: 'Windows', 'Linux' :type os_type: str or ~azure.mgmt.compute.v2017_03_30.models.OperatingSystemTypes :param label: The VM run command label. :type label: str :param description: The VM run command description. :type description: str """ _validation = { 'schema': {'required': True}, 'id': {'required': True}, 'os_type': {'required': True}, 'label': {'required': True}, 'description': {'required': True}, } _attribute_map = { 'schema': {'key': '$schema', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'os_type': {'key': 'osType', 'type': 'OperatingSystemTypes'}, 'label': {'key': 'label', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__(self, schema, id, os_type, label, description): super(RunCommandDocumentBase, self).__init__() self.schema = schema self.id = id self.os_type = os_type self.label = label self.description = description<|fim▁end|>
"""Describes the properties of a Run Command metadata. :param schema: The VM run command schema. :type schema: str
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! General purpose combinators #![allow(unused_imports)] #[cfg(feature = "alloc")] use crate::lib::std::boxed::Box; use crate::error::{ErrorKind, FromExternalError, ParseError}; use crate::internal::*; use crate::lib::std::borrow::Borrow; use crate::lib::std::convert::Into; #[cfg(feature = "std")] use crate::lib::std::fmt::Debug; use crate::lib::std::mem::transmute; use crate::lib::std::ops::{Range, RangeFrom, RangeTo}; use crate::traits::{AsChar, InputIter, InputLength, InputTakeAtPosition, ParseTo}; use crate::traits::{Compare, CompareResult, Offset, Slice}; #[cfg(test)] mod tests; /// Return the remaining input. /// /// ```rust /// # use nom::error::ErrorKind; /// use nom::combinator::rest; /// assert_eq!(rest::<_,(_, ErrorKind)>("abc"), Ok(("", "abc"))); /// assert_eq!(rest::<_,(_, ErrorKind)>(""), Ok(("", ""))); /// ``` #[inline] pub fn rest<T, E: ParseError<T>>(input: T) -> IResult<T, T, E> where T: Slice<RangeFrom<usize>>, T: InputLength, { Ok((input.slice(input.input_len()..), input)) } /// Return the length of the remaining input. /// /// ```rust /// # use nom::error::ErrorKind; /// use nom::combinator::rest_len; /// assert_eq!(rest_len::<_,(_, ErrorKind)>("abc"), Ok(("abc", 3))); /// assert_eq!(rest_len::<_,(_, ErrorKind)>(""), Ok(("", 0))); /// ``` #[inline] pub fn rest_len<T, E: ParseError<T>>(input: T) -> IResult<T, usize, E> where T: InputLength, { let len = input.input_len(); Ok((input, len)) } /// Maps a function on the result of a parser. /// /// ```rust /// use nom::{Err,error::ErrorKind, IResult,Parser}; /// use nom::character::complete::digit1; /// use nom::combinator::map; /// # fn main() { /// /// let mut parser = map(digit1, |s: &str| s.len()); /// /// // the parser will count how many characters were returned by digit1 /// assert_eq!(parser.parse("123456"), Ok(("", 6))); /// /// // this will fail if digit1 fails /// assert_eq!(parser.parse("abc"), Err(Err::Error(("abc", ErrorKind::Digit)))); /// # } /// ``` pub fn map<I, O1, O2, E, F, G>(mut parser: F, mut f: G) -> impl FnMut(I) -> IResult<I, O2, E> where F: Parser<I, O1, E>, G: FnMut(O1) -> O2, { move |input: I| { let (input, o1) = parser.parse(input)?; Ok((input, f(o1))) } } /// Applies a function returning a `Result` over the result of a parser. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::character::complete::digit1; /// use nom::combinator::map_res; /// # fn main() { /// /// let mut parse = map_res(digit1, |s: &str| s.parse::<u8>()); /// /// // the parser will convert the result of digit1 to a number /// assert_eq!(parse("123"), Ok(("", 123))); /// /// // this will fail if digit1 fails /// assert_eq!(parse("abc"), Err(Err::Error(("abc", ErrorKind::Digit)))); /// /// // this will fail if the mapped function fails (a `u8` is too small to hold `123456`) /// assert_eq!(parse("123456"), Err(Err::Error(("123456", ErrorKind::MapRes)))); /// # } /// ``` pub fn map_res<I: Clone, O1, O2, E: FromExternalError<I, E2>, E2, F, G>( mut parser: F, mut f: G, ) -> impl FnMut(I) -> IResult<I, O2, E> where F: Parser<I, O1, E>, G: FnMut(O1) -> Result<O2, E2>, { move |input: I| { let i = input.clone(); let (input, o1) = parser.parse(input)?; match f(o1) { Ok(o2) => Ok((input, o2)), Err(e) => Err(Err::Error(E::from_external_error(i, ErrorKind::MapRes, e))), } } } /// Applies a function returning an `Option` over the result of a parser. 
/// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::character::complete::digit1; /// use nom::combinator::map_opt; /// # fn main() { /// /// let mut parse = map_opt(digit1, |s: &str| s.parse::<u8>().ok()); /// /// // the parser will convert the result of digit1 to a number /// assert_eq!(parse("123"), Ok(("", 123))); /// /// // this will fail if digit1 fails /// assert_eq!(parse("abc"), Err(Err::Error(("abc", ErrorKind::Digit)))); /// /// // this will fail if the mapped function fails (a `u8` is too small to hold `123456`) /// assert_eq!(parse("123456"), Err(Err::Error(("123456", ErrorKind::MapOpt)))); /// # } /// ``` pub fn map_opt<I: Clone, O1, O2, E: ParseError<I>, F, G>( mut parser: F, mut f: G, ) -> impl FnMut(I) -> IResult<I, O2, E> where F: Parser<I, O1, E>, G: FnMut(O1) -> Option<O2>, { move |input: I| { let i = input.clone(); let (input, o1) = parser.parse(input)?; match f(o1) { Some(o2) => Ok((input, o2)), None => Err(Err::Error(E::from_error_kind(i, ErrorKind::MapOpt))), } } } /// Applies a parser over the result of another one. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::character::complete::digit1; /// use nom::bytes::complete::take; /// use nom::combinator::map_parser; /// # fn main() { /// /// let mut parse = map_parser(take(5u8), digit1); /// /// assert_eq!(parse("12345"), Ok(("", "12345"))); /// assert_eq!(parse("123ab"), Ok(("", "123"))); /// assert_eq!(parse("123"), Err(Err::Error(("123", ErrorKind::Eof)))); /// # } /// ``` pub fn map_parser<I, O1, O2, E: ParseError<I>, F, G>( mut parser: F, mut applied_parser: G, ) -> impl FnMut(I) -> IResult<I, O2, E> where F: Parser<I, O1, E>, G: Parser<O1, O2, E>, { move |input: I| { let (input, o1) = parser.parse(input)?; let (_, o2) = applied_parser.parse(o1)?; Ok((input, o2)) } } /// Creates a new parser from the output of the first parser, then apply that parser over the rest of the input. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::bytes::complete::take; /// use nom::number::complete::u8; /// use nom::combinator::flat_map; /// # fn main() { /// /// let mut parse = flat_map(u8, take); /// /// assert_eq!(parse(&[2, 0, 1, 2][..]), Ok((&[2][..], &[0, 1][..]))); /// assert_eq!(parse(&[4, 0, 1, 2][..]), Err(Err::Error((&[0, 1, 2][..], ErrorKind::Eof)))); /// # } /// ``` pub fn flat_map<I, O1, O2, E: ParseError<I>, F, G, H>( mut parser: F, mut applied_parser: G, ) -> impl FnMut(I) -> IResult<I, O2, E> where F: Parser<I, O1, E>, G: FnMut(O1) -> H, H: Parser<I, O2, E>, { move |input: I| { let (input, o1) = parser.parse(input)?; applied_parser(o1).parse(input) } } /// Optional parser: Will return `None` if not successful. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::opt; /// use nom::character::complete::alpha1; /// # fn main() { /// /// fn parser(i: &str) -> IResult<&str, Option<&str>> { /// opt(alpha1)(i) /// } /// /// assert_eq!(parser("abcd;"), Ok((";", Some("abcd")))); /// assert_eq!(parser("123;"), Ok(("123;", None))); /// # } /// ``` pub fn opt<I: Clone, O, E: ParseError<I>, F>(mut f: F) -> impl FnMut(I) -> IResult<I, Option<O>, E> where F: Parser<I, O, E>, { move |input: I| { let i = input.clone(); match f.parse(input) { Ok((i, o)) => Ok((i, Some(o))), Err(Err::Error(_)) => Ok((i, None)), Err(e) => Err(e), } } } /// Calls the parser if the condition is met. 
/// /// ```rust /// # use nom::{Err, error::{Error, ErrorKind}, IResult}; /// use nom::combinator::cond; /// use nom::character::complete::alpha1; /// # fn main() { /// /// fn parser(b: bool, i: &str) -> IResult<&str, Option<&str>> { /// cond(b, alpha1)(i) /// } /// /// assert_eq!(parser(true, "abcd;"), Ok((";", Some("abcd")))); /// assert_eq!(parser(false, "abcd;"), Ok(("abcd;", None))); /// assert_eq!(parser(true, "123;"), Err(Err::Error(Error::new("123;", ErrorKind::Alpha)))); /// assert_eq!(parser(false, "123;"), Ok(("123;", None))); /// # } /// ``` pub fn cond<I, O, E: ParseError<I>, F>( b: bool, mut f: F, ) -> impl FnMut(I) -> IResult<I, Option<O>, E> where F: Parser<I, O, E>, { move |input: I| { if b { match f.parse(input) { Ok((i, o)) => Ok((i, Some(o))), Err(e) => Err(e), } } else { Ok((input, None)) }<|fim▁hole|>/// Tries to apply its parser without consuming the input. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::peek; /// use nom::character::complete::alpha1; /// # fn main() { /// /// let mut parser = peek(alpha1); /// /// assert_eq!(parser("abcd;"), Ok(("abcd;", "abcd"))); /// assert_eq!(parser("123;"), Err(Err::Error(("123;", ErrorKind::Alpha)))); /// # } /// ``` pub fn peek<I: Clone, O, E: ParseError<I>, F>(mut f: F) -> impl FnMut(I) -> IResult<I, O, E> where F: Parser<I, O, E>, { move |input: I| { let i = input.clone(); match f.parse(input) { Ok((_, o)) => Ok((i, o)), Err(e) => Err(e), } } } /// returns its input if it is at the end of input data /// /// When we're at the end of the data, this combinator /// will succeed /// /// ``` /// # use std::str; /// # use nom::{Err, error::ErrorKind, IResult}; /// # use nom::combinator::eof; /// /// # fn main() { /// let parser = eof; /// assert_eq!(parser("abc"), Err(Err::Error(("abc", ErrorKind::Eof)))); /// assert_eq!(parser(""), Ok(("", ""))); /// # } /// ``` pub fn eof<I: InputLength + Clone, E: ParseError<I>>(input: I) -> IResult<I, I, E> { if input.input_len() == 0 { let clone = input.clone(); Ok((input, clone)) } else { Err(Err::Error(E::from_error_kind(input, ErrorKind::Eof))) } } /// Transforms Incomplete into `Error`. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::bytes::streaming::take; /// use nom::combinator::complete; /// # fn main() { /// /// let mut parser = complete(take(5u8)); /// /// assert_eq!(parser("abcdefg"), Ok(("fg", "abcde"))); /// assert_eq!(parser("abcd"), Err(Err::Error(("abcd", ErrorKind::Complete)))); /// # } /// ``` pub fn complete<I: Clone, O, E: ParseError<I>, F>(mut f: F) -> impl FnMut(I) -> IResult<I, O, E> where F: Parser<I, O, E>, { move |input: I| { let i = input.clone(); match f.parse(input) { Err(Err::Incomplete(_)) => Err(Err::Error(E::from_error_kind(i, ErrorKind::Complete))), rest => rest, } } } /// Succeeds if all the input has been consumed by its child parser. 
/// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::all_consuming; /// use nom::character::complete::alpha1; /// # fn main() { /// /// let mut parser = all_consuming(alpha1); /// /// assert_eq!(parser("abcd"), Ok(("", "abcd"))); /// assert_eq!(parser("abcd;"),Err(Err::Error((";", ErrorKind::Eof)))); /// assert_eq!(parser("123abcd;"),Err(Err::Error(("123abcd;", ErrorKind::Alpha)))); /// # } /// ``` pub fn all_consuming<I, O, E: ParseError<I>, F>(mut f: F) -> impl FnMut(I) -> IResult<I, O, E> where I: InputLength, F: Parser<I, O, E>, { move |input: I| { let (input, res) = f.parse(input)?; if input.input_len() == 0 { Ok((input, res)) } else { Err(Err::Error(E::from_error_kind(input, ErrorKind::Eof))) } } } /// Returns the result of the child parser if it satisfies a verification function. /// /// The verification function takes as argument a reference to the output of the /// parser. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::verify; /// use nom::character::complete::alpha1; /// # fn main() { /// /// let mut parser = verify(alpha1, |s: &str| s.len() == 4); /// /// assert_eq!(parser("abcd"), Ok(("", "abcd"))); /// assert_eq!(parser("abcde"), Err(Err::Error(("abcde", ErrorKind::Verify)))); /// assert_eq!(parser("123abcd;"),Err(Err::Error(("123abcd;", ErrorKind::Alpha)))); /// # } /// ``` pub fn verify<I: Clone, O1, O2, E: ParseError<I>, F, G>( mut first: F, second: G, ) -> impl FnMut(I) -> IResult<I, O1, E> where F: Parser<I, O1, E>, G: Fn(&O2) -> bool, O1: Borrow<O2>, O2: ?Sized, { move |input: I| { let i = input.clone(); let (input, o) = first.parse(input)?; if second(o.borrow()) { Ok((input, o)) } else { Err(Err::Error(E::from_error_kind(i, ErrorKind::Verify))) } } } /// Returns the provided value if the child parser succeeds. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::value; /// use nom::character::complete::alpha1; /// # fn main() { /// /// let mut parser = value(1234, alpha1); /// /// assert_eq!(parser("abcd"), Ok(("", 1234))); /// assert_eq!(parser("123abcd;"), Err(Err::Error(("123abcd;", ErrorKind::Alpha)))); /// # } /// ``` pub fn value<I, O1: Clone, O2, E: ParseError<I>, F>( val: O1, mut parser: F, ) -> impl FnMut(I) -> IResult<I, O1, E> where F: Parser<I, O2, E>, { move |input: I| parser.parse(input).map(|(i, _)| (i, val.clone())) } /// Succeeds if the child parser returns an error. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::not; /// use nom::character::complete::alpha1; /// # fn main() { /// /// let mut parser = not(alpha1); /// /// assert_eq!(parser("123"), Ok(("123", ()))); /// assert_eq!(parser("abcd"), Err(Err::Error(("abcd", ErrorKind::Not)))); /// # } /// ``` pub fn not<I: Clone, O, E: ParseError<I>, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, (), E> where F: Parser<I, O, E>, { move |input: I| { let i = input.clone(); match parser.parse(input) { Ok(_) => Err(Err::Error(E::from_error_kind(i, ErrorKind::Not))), Err(Err::Error(_)) => Ok((i, ())), Err(e) => Err(e), } } } /// If the child parser was successful, return the consumed input as produced value. 
/// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::recognize; /// use nom::character::complete::{char, alpha1}; /// use nom::sequence::separated_pair; /// # fn main() { /// /// let mut parser = recognize(separated_pair(alpha1, char(','), alpha1)); /// /// assert_eq!(parser("abcd,efgh"), Ok(("", "abcd,efgh"))); /// assert_eq!(parser("abcd;"),Err(Err::Error((";", ErrorKind::Char)))); /// # } /// ``` pub fn recognize<I: Clone + Offset + Slice<RangeTo<usize>>, O, E: ParseError<I>, F>( mut parser: F, ) -> impl FnMut(I) -> IResult<I, I, E> where F: Parser<I, O, E>, { move |input: I| { let i = input.clone(); match parser.parse(i) { Ok((i, _)) => { let index = input.offset(&i); Ok((i, input.slice(..index))) } Err(e) => Err(e), } } } /// if the child parser was successful, return the consumed input with the output /// as a tuple. Functions similarly to [recognize](fn.recognize.html) except it /// returns the parser output as well. /// /// This can be useful especially in cases where the output is not the same type /// as the input, or the input is a user defined type. /// /// Returned tuple is of the format `(consumed input, produced output)`. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::{consumed, value, recognize, map}; /// use nom::character::complete::{char, alpha1}; /// use nom::bytes::complete::tag; /// use nom::sequence::separated_pair; /// /// fn inner_parser(input: &str) -> IResult<&str, bool> { /// value(true, tag("1234"))(input) /// } /// /// # fn main() { /// /// let mut consumed_parser = consumed(value(true, separated_pair(alpha1, char(','), alpha1))); /// /// assert_eq!(consumed_parser("abcd,efgh1"), Ok(("1", ("abcd,efgh", true)))); /// assert_eq!(consumed_parser("abcd;"),Err(Err::Error((";", ErrorKind::Char)))); /// /// /// // the first output (representing the consumed input) /// // should be the same as that of the `recognize` parser. 
/// let mut recognize_parser = recognize(inner_parser); /// let mut consumed_parser = map(consumed(inner_parser), |(consumed, output)| consumed); /// /// assert_eq!(recognize_parser("1234"), consumed_parser("1234")); /// assert_eq!(recognize_parser("abcd"), consumed_parser("abcd")); /// # } /// ``` pub fn consumed<I, O, F, E>(mut parser: F) -> impl FnMut(I) -> IResult<I, (I, O), E> where I: Clone + Offset + Slice<RangeTo<usize>>, E: ParseError<I>, F: Parser<I, O, E>, { move |input: I| { let i = input.clone(); match parser.parse(i) { Ok((remaining, result)) => { let index = input.offset(&remaining); let consumed = input.slice(..index); Ok((remaining, (consumed, result))) } Err(e) => Err(e), } } } /// transforms an error to failure /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::combinator::cut; /// use nom::character::complete::alpha1; /// # fn main() { /// /// let mut parser = cut(alpha1); /// /// assert_eq!(parser("abcd;"), Ok((";", "abcd"))); /// assert_eq!(parser("123;"), Err(Err::Failure(("123;", ErrorKind::Alpha)))); /// # } /// ``` pub fn cut<I, O, E: ParseError<I>, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, O, E> where F: Parser<I, O, E>, { move |input: I| match parser.parse(input) { Err(Err::Error(e)) => Err(Err::Failure(e)), rest => rest, } } /// automatically converts the child parser's result to another type /// /// it will be able to convert the output value and the error value /// as long as the `Into` implementations are available /// /// ```rust /// # use nom::IResult; /// use nom::combinator::into; /// use nom::character::complete::alpha1; /// # fn main() { /// /// fn parser1(i: &str) -> IResult<&str, &str> { /// alpha1(i) /// } /// /// let mut parser2 = into(parser1); /// /// // the parser converts the &str output of the child parser into a Vec<u8> /// let bytes: IResult<&str, Vec<u8>> = parser2("abcd"); /// assert_eq!(bytes, Ok(("", vec![97, 98, 99, 100]))); /// # } /// ``` pub fn into<I, O1, O2, E1, E2, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, O2, E2> where O1: Into<O2>, E1: Into<E2>, E1: ParseError<I>, E2: ParseError<I>, F: Parser<I, O1, E1>, { //map(parser, Into::into) move |input: I| match parser.parse(input) { Ok((i, o)) => Ok((i, o.into())), Err(Err::Error(e)) => Err(Err::Error(e.into())), Err(Err::Failure(e)) => Err(Err::Failure(e.into())), Err(Err::Incomplete(e)) => Err(Err::Incomplete(e)), } } /// Creates an iterator from input data and a parser. /// /// Call the iterator's [ParserIterator::finish] method to get the remaining input if successful, /// or the error value if we encountered an error. /// /// ```rust /// use nom::{combinator::iterator, IResult, bytes::complete::tag, character::complete::alpha1, sequence::terminated}; /// use std::collections::HashMap; /// /// let data = "abc|defg|hijkl|mnopqr|123"; /// let mut it = iterator(data, terminated(alpha1, tag("|"))); /// /// let parsed = it.map(|v| (v, v.len())).collect::<HashMap<_,_>>(); /// let res: IResult<_,_> = it.finish(); /// /// assert_eq!(parsed, [("abc", 3usize), ("defg", 4), ("hijkl", 5), ("mnopqr", 6)].iter().cloned().collect()); /// assert_eq!(res, Ok(("123", ()))); /// ``` pub fn iterator<Input, Output, Error, F>(input: Input, f: F) -> ParserIterator<Input, Error, F> where F: Parser<Input, Output, Error>, Error: ParseError<Input>, { ParserIterator { iterator: f, input, state: Some(State::Running), } } /// Main structure associated to the [iterator] function. 
pub struct ParserIterator<I, E, F> { iterator: F, input: I, state: Option<State<E>>, } impl<I: Clone, E, F> ParserIterator<I, E, F> { /// Returns the remaining input if parsing was successful, or the error if we encountered an error. pub fn finish(mut self) -> IResult<I, (), E> { match self.state.take().unwrap() { State::Running | State::Done => Ok((self.input, ())), State::Failure(e) => Err(Err::Failure(e)), State::Incomplete(i) => Err(Err::Incomplete(i)), } } } impl<'a, Input, Output, Error, F> core::iter::Iterator for &'a mut ParserIterator<Input, Error, F> where F: FnMut(Input) -> IResult<Input, Output, Error>, Input: Clone, { type Item = Output; fn next(&mut self) -> Option<Self::Item> { if let State::Running = self.state.take().unwrap() { let input = self.input.clone(); match (self.iterator)(input) { Ok((i, o)) => { self.input = i; self.state = Some(State::Running); Some(o) } Err(Err::Error(_)) => { self.state = Some(State::Done); None } Err(Err::Failure(e)) => { self.state = Some(State::Failure(e)); None } Err(Err::Incomplete(i)) => { self.state = Some(State::Incomplete(i)); None } } } else { None } } } enum State<E> { Running, Done, Failure(E), Incomplete(Needed), } /// a parser which always succeeds with given value without consuming any input. /// /// It can be used for example as the last alternative in `alt` to /// specify the default case. /// /// ```rust /// # use nom::{Err,error::ErrorKind, IResult}; /// use nom::branch::alt; /// use nom::combinator::{success, value}; /// use nom::character::complete::char; /// # fn main() { /// /// let mut parser = success::<_,_,(_,ErrorKind)>(10); /// assert_eq!(parser("xyz"), Ok(("xyz", 10))); /// /// let mut sign = alt((value(-1, char('-')), value(1, char('+')), success::<_,_,(_,ErrorKind)>(1))); /// assert_eq!(sign("+10"), Ok(("10", 1))); /// assert_eq!(sign("-10"), Ok(("10", -1))); /// assert_eq!(sign("10"), Ok(("10", 1))); /// # } /// ``` pub fn success<I, O: Clone, E: ParseError<I>>(val: O) -> impl Fn(I) -> IResult<I, O, E> { move |input: I| Ok((input, val.clone())) } /// A parser which always fails. /// /// ```rust /// # use nom::{Err, error::ErrorKind, IResult}; /// use nom::combinator::fail; /// /// let s = "string"; /// assert_eq!(fail::<_, &str, _>(s), Err(Err::Error((s, ErrorKind::Fail)))); /// ``` pub fn fail<I, O, E: ParseError<I>>(i: I) -> IResult<I, O, E> { Err(Err::Error(E::from_error_kind(i, ErrorKind::Fail))) }<|fim▁end|>
} }
<|file_name|>test_get_config.py<|end_file_name|><|fim▁begin|>from ipctest import IpcTest from gi.repository import i3ipc import pytest @pytest.mark.skip(reason='TODO') class TestGetConfig(IpcTest): def test_get_config(self, i3): config = i3.get_config()<|fim▁hole|> assert config.config == f.read()<|fim▁end|>
        assert isinstance(config, i3ipc.ConfigReply)
        with open('test/i3.config') as f:
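# A minimal sketch reassembling the i3ipc row above: the prompt's prefix, the completion,
# and the prompt's suffix spliced back in place of the hole marker. Every statement is
# taken from the row itself; only line breaks and indentation are restored here.
from ipctest import IpcTest
from gi.repository import i3ipc

import pytest


@pytest.mark.skip(reason='TODO')
class TestGetConfig(IpcTest):
    def test_get_config(self, i3):
        config = i3.get_config()
        assert isinstance(config, i3ipc.ConfigReply)
        with open('test/i3.config') as f:
            assert config.config == f.read()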
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Scality SA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at<|fim▁hole|># Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import setuptools setuptools.setup( setup_requires=['pbr', ], pbr=True, )<|fim▁end|>
#
# http://www.apache.org/licenses/LICENSE-2.0
#
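# A minimal sketch of the executable portion of the setup.py row above (the Apache
# license header, whose URL comment lines form the completion, is omitted here). pbr
# reads the actual package metadata from a setup.cfg file, which is assumed to sit
# next to setup.py and is not part of the row.
import setuptools

setuptools.setup(
    setup_requires=['pbr', ],
    pbr=True,
)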
<|file_name|>test_artificial_32_Anscombe_LinearTrend_7_12_0.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_dataset as art<|fim▁hole|>art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 7, transform = "Anscombe", sigma = 0.0, exog_count = 0, ar_order = 12);<|fim▁end|>
<|file_name|>data.py<|end_file_name|><|fim▁begin|>import os import sys import numpy as np import matplotlib.image as mpimg from ..core.data import Data from ..util import tryremove URL = 'http://synthia-dataset.cvc.uab.cat/SYNTHIA_SEQS/' SEQS = [ # SUMMER and WINTER from sequences `1 - 6` 'SYNTHIA-SEQS-01-SUMMER', 'SYNTHIA-SEQS-01-WINTER', 'SYNTHIA-SEQS-02-SUMMER', 'SYNTHIA-SEQS-02-WINTER', 'SYNTHIA-SEQS-04-SUMMER', 'SYNTHIA-SEQS-04-WINTER', 'SYNTHIA-SEQS-05-SUMMER', 'SYNTHIA-SEQS-05-WINTER', 'SYNTHIA-SEQS-06-SUMMER', 'SYNTHIA-SEQS-06-WINTER' ] DEV_SEQS = ['SYNTHIA-SEQS-01-SUMMER']<|fim▁hole|> class SynthiaData(Data): dirs = ['synthia'] def __init__(self, data_dir, stat_log_dir=None, development=True, fast_dir=None): super().__init__(data_dir, stat_log_dir, development=development, fast_dir=fast_dir) def _fetch_if_missing(self): self._maybe_get_synthia() def get_raw_dirs(self): root_dir = os.path.join(self.current_dir, 'synthia') dirs = [] seqs = os.listdir(root_dir) for seq in seqs: seq_dir = os.path.join(root_dir, seq, seq, 'RGB', 'Stereo_Left') views = os.listdir(seq_dir) for view in views: view_dir = os.path.join(seq_dir, view) dirs.extend([view_dir]) return dirs def _maybe_get_synthia(self): seqs = DEV_SEQS if self.development else SEQS for seq in seqs: root_dir = os.path.join(self.data_dir, 'synthia') url = URL + seq + '.rar' url_dir = os.path.join(root_dir, seq) if not os.path.isdir(url_dir): self._download_and_extract(url, url_dir, 'rar') # Remove unused directories tryremove(os.path.join(url_dir, seq, 'GT')) tryremove(os.path.join(url_dir, seq, 'Depth')) tryremove(os.path.join(url_dir, seq, 'CameraParams')) tryremove(os.path.join(url_dir, 'RGB', 'Stereo_Right'))<|fim▁end|>
<|file_name|>User.js<|end_file_name|><|fim▁begin|>const TextBasedChannel = require('./interfaces/TextBasedChannel'); const Constants = require('../util/Constants'); const { Presence } = require('./Presence'); const UserProfile = require('./UserProfile'); const Snowflake = require('../util/Snowflake'); const { Error } = require('../errors'); /** * Represents a user on Discord. * @implements {TextBasedChannel} */ class User { constructor(client, data) { /** * The client that created the instance of the user * @name User#client * @type {Client} * @readonly */ Object.defineProperty(this, 'client', { value: client }); if (data) this.setup(data); } setup(data) { /** * The ID of the user * @type {Snowflake} */ this.id = data.id; /**<|fim▁hole|> */ this.username = data.username; /** * A discriminator based on username for the user * @type {string} */ this.discriminator = data.discriminator; /** * The ID of the user's avatar * @type {string} */ this.avatar = data.avatar; /** * Whether or not the user is a bot * @type {boolean} */ this.bot = Boolean(data.bot); /** * The ID of the last message sent by the user, if one was sent * @type {?Snowflake} */ this.lastMessageID = null; /** * The Message object of the last message sent by the user, if one was sent * @type {?Message} */ this.lastMessage = null; } patch(data) { for (const prop of ['id', 'username', 'discriminator', 'avatar', 'bot']) { if (typeof data[prop] !== 'undefined') this[prop] = data[prop]; } if (data.token) this.client.token = data.token; } /** * The timestamp the user was created at * @type {number} * @readonly */ get createdTimestamp() { return Snowflake.deconstruct(this.id).timestamp; } /** * The time the user was created * @type {Date} * @readonly */ get createdAt() { return new Date(this.createdTimestamp); } /** * The presence of this user * @type {Presence} * @readonly */ get presence() { if (this.client.presences.has(this.id)) return this.client.presences.get(this.id); for (const guild of this.client.guilds.values()) { if (guild.presences.has(this.id)) return guild.presences.get(this.id); } return new Presence(); } /** * A link to the user's avatar * @param {Object} [options={}] Options for the avatar url * @param {string} [options.format='webp'] One of `webp`, `png`, `jpg`, `gif`. If no format is provided, * it will be `gif` for animated avatars or otherwise `webp` * @param {number} [options.size=128] One of `128`, `256`, `512`, `1024`, `2048` * @returns {?string} */ avatarURL({ format, size } = {}) { if (!this.avatar) return null; return Constants.Endpoints.CDN(this.client.options.http.cdn).Avatar(this.id, this.avatar, format, size); } /** * A link to the user's default avatar * @type {string} * @readonly */ get defaultAvatarURL() { return Constants.Endpoints.CDN(this.client.options.http.cdn).DefaultAvatar(this.discriminator % 5); } /** * A link to the user's avatar if they have one. Otherwise a link to their default avatar will be returned * @param {Object} [options={}] Options for the avatar url * @param {string} [options.format='webp'] One of `webp`, `png`, `jpg`, `gif`. 
If no format is provided, * it will be `gif` for animated avatars or otherwise `webp` * @param {number} [options.size=128] One of `128`, '256', `512`, `1024`, `2048` * @returns {string} */ displayAvatarURL(options) { return this.avatarURL(options) || this.defaultAvatarURL; } /** * The Discord "tag" for this user * @type {string} * @readonly */ get tag() { return `${this.username}#${this.discriminator}`; } /** * The note that is set for the user * <warn>This is only available when using a user account.</warn> * @type {?string} * @readonly */ get note() { return this.client.user.notes.get(this.id) || null; } /** * Check whether the user is typing in a channel. * @param {ChannelResolvable} channel The channel to check in * @returns {boolean} */ typingIn(channel) { channel = this.client.resolver.resolveChannel(channel); return channel._typing.has(this.id); } /** * Get the time that the user started typing. * @param {ChannelResolvable} channel The channel to get the time in * @returns {?Date} */ typingSinceIn(channel) { channel = this.client.resolver.resolveChannel(channel); return channel._typing.has(this.id) ? new Date(channel._typing.get(this.id).since) : null; } /** * Get the amount of time the user has been typing in a channel for (in milliseconds), or -1 if they're not typing. * @param {ChannelResolvable} channel The channel to get the time in * @returns {number} */ typingDurationIn(channel) { channel = this.client.resolver.resolveChannel(channel); return channel._typing.has(this.id) ? channel._typing.get(this.id).elapsedTime : -1; } /** * The DM between the client's user and this user * @type {?DMChannel} * @readonly */ get dmChannel() { return this.client.channels.filter(c => c.type === 'dm').find(c => c.recipient.id === this.id); } /** * Creates a DM channel between the client and the user. * @returns {Promise<DMChannel>} */ createDM() { if (this.dmChannel) return Promise.resolve(this.dmChannel); return this.client.api.users(this.client.user.id).channels.post({ data: { recipient_id: this.id, } }) .then(data => this.client.actions.ChannelCreate.handle(data).channel); } /** * Deletes a DM channel (if one exists) between the client and the user. Resolves with the channel if successful. * @returns {Promise<DMChannel>} */ deleteDM() { if (!this.dmChannel) return Promise.reject(new Error('USER_NO_DMCHANNEL')); return this.client.api.channels(this.dmChannel.id).delete() .then(data => this.client.actions.ChannelDelete.handle(data).channel); } /** * Get the profile of the user. * <warn>This is only available when using a user account.</warn> * @returns {Promise<UserProfile>} */ fetchProfile() { return this.client.api.users(this.id).profile.get().then(data => new UserProfile(this, data)); } /** * Sets a note for the user. * <warn>This is only available when using a user account.</warn> * @param {string} note The note to set for the user * @returns {Promise<User>} */ setNote(note) { return this.client.api.users('@me').notes(this.id).put({ data: { note } }) .then(() => this); } /** * Checks if the user is equal to another. It compares ID, username, discriminator, avatar, and bot flags. * It is recommended to compare equality by using `user.id === user2.id` unless you want to compare all properties. 
* @param {User} user User to compare with * @returns {boolean} */ equals(user) { let equal = user && this.id === user.id && this.username === user.username && this.discriminator === user.discriminator && this.avatar === user.avatar && this.bot === Boolean(user.bot); return equal; } /** * When concatenated with a string, this automatically concatenates the user's mention instead of the User object. * @returns {string} * @example * // logs: Hello from <@123456789>! * console.log(`Hello from ${user}!`); */ toString() { return `<@${this.id}>`; } // These are here only for documentation purposes - they are implemented by TextBasedChannel /* eslint-disable no-empty-function */ send() {} } TextBasedChannel.applyToClass(User); module.exports = User;<|fim▁end|>
     * The username of the user
     * @type {string}
<|file_name|>service.go<|end_file_name|><|fim▁begin|>package diagnostic import ( "bytes" "errors" "io" "os" "path" "strings" "sync" "go.uber.org/zap" "go.uber.org/zap/zapcore" ) type nopCloser struct { f io.Writer } func (c *nopCloser) Write(b []byte) (int, error) { return c.f.Write(b) } func (c *nopCloser) Close() error { return nil } type Service struct { c Config Logger Logger f io.WriteCloser stdout io.Writer stderr io.Writer SessionService *SessionService levelMu sync.RWMutex level string } func NewService(c Config, stdout, stderr io.Writer) *Service { return &Service{ c: c, stdout: stdout, stderr: stderr, } } func BootstrapMainHandler() *CmdHandler { s := NewService(NewConfig(), nil, os.Stderr) // Should never error _ = s.Open() return s.NewCmdHandler() } func (s *Service) SetLogLevelFromName(lvl string) error { s.levelMu.Lock() defer s.levelMu.Unlock() level := strings.ToUpper(lvl) switch level { case "INFO", "ERROR", "DEBUG": s.level = level default: return errors.New("invalid log level") } return nil } func logLevelFromName(lvl string) Level { var level Level switch lvl { case "INFO", "info": level = InfoLevel case "ERROR", "error": level = ErrorLevel case "DEBUG", "debug": level = DebugLevel } return level } func (s *Service) Open() error { s.levelMu.Lock() s.level = s.c.Level s.levelMu.Unlock() levelF := func(lvl Level) bool { s.levelMu.RLock() defer s.levelMu.RUnlock() return lvl >= logLevelFromName(s.level) } switch s.c.File { case "STDERR": s.f = &nopCloser{f: s.stderr} case "STDOUT": s.f = &nopCloser{f: s.stdout} default: dir := path.Dir(s.c.File) if _, err := os.Stat(dir); os.IsNotExist(err) { err := os.MkdirAll(dir, 0755) if err != nil { return err } } f, err := os.OpenFile(s.c.File, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0640) if err != nil { return err } s.f = f } l := NewServerLogger(s.f) l.SetLevelF(levelF) s.SessionService = NewSessionService() s.Logger = NewMultiLogger( l, s.SessionService.NewLogger(), ) s.SessionService.SetDiagnostic(s.NewSessionHandler()) return nil } func (s *Service) Close() error { if s.f != nil { return s.f.Close() } return nil } func (s *Service) NewSideloadHandler() *SideloadHandler { return &SideloadHandler{ l: s.Logger.With(String("service", "sideload")), } } func (s *Service) NewVictorOpsHandler() *VictorOpsHandler { return &VictorOpsHandler{ l: s.Logger.With(String("service", "victorops")), } } func (s *Service) NewSlackHandler() *SlackHandler { return &SlackHandler{ l: s.Logger.With(String("service", "slack")), } } func (s *Service) NewDiscordHandler() *DiscordHandler { return &DiscordHandler{ l: s.Logger.With(String("service", "discord")), } } func (s *Service) NewBigPandaHandler() *BigPandaHandler { return &BigPandaHandler{ l: s.Logger.With(String("service", "bigpanda")), } } func (s *Service) NewTaskStoreHandler() *TaskStoreHandler { return &TaskStoreHandler{ l: s.Logger.With(String("service", "task_store")), } } func (s *Service) NewReportingHandler() *ReportingHandler { return &ReportingHandler{ l: s.Logger.With(String("service", "reporting")), } } func (s *Service) NewStorageHandler() *StorageHandler { return &StorageHandler{ l: s.Logger.With(String("service", "storage")), } } func (s *Service) NewHTTPDHandler() *HTTPDHandler { return &HTTPDHandler{ l: s.Logger.With(String("service", "http")), } } func (s *Service) NewAlertaHandler() *AlertaHandler { return &AlertaHandler{ l: s.Logger.With(String("service", "alerta")), } } func (s *Service) NewKapacitorHandler() *KapacitorHandler { return &KapacitorHandler{ l: 
s.Logger.With(String("service", "kapacitor")), } } func (s *Service) NewAlertServiceHandler() *AlertServiceHandler { return &AlertServiceHandler{ L: s.Logger.With(String("service", "alert")), } } func (s *Service) NewHipChatHandler() *HipChatHandler { return &HipChatHandler{ l: s.Logger.With(String("service", "hipchat")), } } func (s *Service) NewKafkaHandler() *KafkaHandler { return &KafkaHandler{ l: s.Logger.With(String("service", "kafka")), } } func (s *Service) NewPagerDutyHandler() *PagerDutyHandler { return &PagerDutyHandler{ l: s.Logger.With(String("service", "pagerduty")), } } func (s *Service) NewPagerDuty2Handler() *PagerDuty2Handler { return &PagerDuty2Handler{ l: s.Logger.With(String("service", "pagerduty2")), } } func (s *Service) NewSMTPHandler() *SMTPHandler { return &SMTPHandler{ l: s.Logger.With(String("service", "smtp")), } } func (s *Service) NewUDFServiceHandler() *UDFServiceHandler { return &UDFServiceHandler{ l: s.Logger.With(String("service", "udf")), } } func (s *Service) NewOpsGenieHandler() *OpsGenieHandler { return &OpsGenieHandler{ l: s.Logger.With(String("service", "opsgenie")), } } func (s *Service) NewOpsGenie2Handler() *OpsGenie2Handler { return &OpsGenie2Handler{ l: s.Logger.With(String("service", "opsgenie2")), } } func (s *Service) NewPushoverHandler() *PushoverHandler { return &PushoverHandler{ l: s.Logger.With(String("service", "pushover")), } } func (s *Service) NewHTTPPostHandler() *HTTPPostHandler { return &HTTPPostHandler{ l: s.Logger.With(String("service", "httppost")), } } func (s *Service) NewSensuHandler() *SensuHandler { return &SensuHandler{ l: s.Logger.With(String("service", "sensu")), } } func (s *Service) NewSNMPTrapHandler() *SNMPTrapHandler { return &SNMPTrapHandler{ l: s.Logger.With(String("service", "snmp")), } } func (s *Service) NewTelegramHandler() *TelegramHandler { return &TelegramHandler{ l: s.Logger.With(String("service", "telegram")), } } func (s *Service) NewMQTTHandler() *MQTTHandler { return &MQTTHandler{ l: s.Logger.With(String("service", "mqtt")), } } func (s *Service) NewTalkHandler() *TalkHandler {<|fim▁hole|> return &TalkHandler{ l: s.Logger.With(String("service", "talk")), } } func (s *Service) NewConfigOverrideHandler() *ConfigOverrideHandler { return &ConfigOverrideHandler{ l: s.Logger.With(String("service", "config-override")), } } func (s *Service) NewServerHandler() *ServerHandler { return &ServerHandler{ l: s.Logger.With(String("source", "srv")), } } func (s *Service) NewReplayHandler() *ReplayHandler { return &ReplayHandler{ l: s.Logger.With(String("service", "replay")), } } func (s *Service) NewK8sHandler() *K8sHandler { return &K8sHandler{ l: s.Logger.With(String("service", "kubernetes")), } } func (s *Service) NewSwarmHandler() *SwarmHandler { return &SwarmHandler{ l: s.Logger.With(String("service", "swarm")), } } func (s *Service) NewEC2Handler() *EC2Handler { return &EC2Handler{ ScraperHandler: &ScraperHandler{ l: s.Logger.With(String("service", "ec2")), buf: bytes.NewBuffer(nil), }, } } func (s *Service) NewDeadmanHandler() *DeadmanHandler { return &DeadmanHandler{ l: s.Logger.With(String("service", "deadman")), } } func (s *Service) NewNoAuthHandler() *NoAuthHandler { return &NoAuthHandler{ l: s.Logger.With(String("service", "noauth")), } } func (s *Service) NewAuthHandler() *AuthHandler { return &AuthHandler{ l: s.Logger.With(String("service", "auth")), } } func (s *Service) NewStatsHandler() *StatsHandler { return &StatsHandler{ l: s.Logger.With(String("service", "stats")), } } func (s *Service) 
NewUDPHandler() *UDPHandler { return &UDPHandler{ l: s.Logger.With(String("service", "udp")), } } func (s *Service) NewInfluxDBHandler() *InfluxDBHandler { return &InfluxDBHandler{ l: s.Logger.With(String("service", "influxdb")), } } func (s *Service) NewScraperHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "scraper")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewAzureHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "azure")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewConsulHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "consul")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewDNSHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "dns")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewFileDiscoveryHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "file-discovery")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewGCEHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "gce")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewMarathonHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "marathon")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewNerveHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "nerve")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewServersetHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "serverset")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewStaticDiscoveryHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "static-discovery")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewTritonHandler() *ScraperHandler { return &ScraperHandler{ l: s.Logger.With(String("service", "triton")), buf: bytes.NewBuffer(nil), } } func (s *Service) NewStaticLevelHandler(level string, service string) (*StaticLevelHandler, error) { var ll logLevel switch level { case "debug": ll = llDebug case "error": ll = llError case "info": ll = llInfo default: ll = llInvalid } if ll == llInvalid { return nil, errors.New("invalid log level") } return &StaticLevelHandler{ l: s.Logger.With(String("service", service)), level: ll, }, nil } func (s *Service) NewCmdHandler() *CmdHandler { return &CmdHandler{ l: s.Logger.With(String("service", "run")), } } func (s *Service) NewSessionHandler() *SessionHandler { return &SessionHandler{ l: s.Logger.With(String("service", "sessions")), } } func (s *Service) NewLoadHandler() *LoadHandler { return &LoadHandler{ l: s.Logger.With(String("service", "load")), } } func (s *Service) NewTeamsHandler() *TeamsHandler { return &TeamsHandler{ l: s.Logger.With(String("service", "teams")), } } func (s *Service) NewServiceNowHandler() *ServiceNowHandler { return &ServiceNowHandler{ l: s.Logger.With(String("service", "serviceNow")), } } func (s *Service) NewZenossHandler() *ZenossHandler { return &ZenossHandler{ l: s.Logger.With(String("service", "zenoss")), } } func (s *Service) NewZapLogger(level zapcore.Level) *zap.Logger { return zap.New(&zapAdapter{ LevelEnabler: zap.LevelEnablerFunc(func(l zapcore.Level) bool { return l >= level }), out: s.Logger, }) }<|fim▁end|>
<|file_name|>explorerModel.test.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import * as assert from 'assert'; import { isLinux, isWindows } from 'vs/base/common/platform'; import { URI } from 'vs/base/common/uri'; import { join } from 'vs/base/common/paths'; import { validateFileName } from 'vs/workbench/parts/files/electron-browser/fileActions'; import { ExplorerItem } from 'vs/workbench/parts/files/common/explorerModel'; function createStat(path: string, name: string, isFolder: boolean, hasChildren: boolean, size: number, mtime: number): ExplorerItem { return new ExplorerItem(toResource(path), null, isFolder, false, false, name, mtime); } function toResource(path) { if (isWindows) { return URI.file(join('C:\\', path)); } else { return URI.file(join('/home/john', path)); } } suite('Files - View Model', () => { test('Properties', () => { const d = new Date().getTime(); let s = createStat('/path/to/stat', 'sName', true, true, 8096, d); assert.strictEqual(s.isDirectoryResolved, false); assert.strictEqual(s.resource.fsPath, toResource('/path/to/stat').fsPath); assert.strictEqual(s.name, 'sName'); assert.strictEqual(s.isDirectory, true); assert.strictEqual(s.mtime, new Date(d).getTime()); s = createStat('/path/to/stat', 'sName', false, false, 8096, d); }); test('Add and Remove Child, check for hasChild', function () { const d = new Date().getTime(); const s = createStat('/path/to/stat', 'sName', true, false, 8096, d); const child1 = createStat('/path/to/stat/foo', 'foo', true, false, 8096, d); const child4 = createStat('/otherpath/to/other/otherbar.html', 'otherbar.html', false, false, 8096, d); s.addChild(child1); assert(!!s.getChild(child1.name)); s.removeChild(child1); s.addChild(child1); assert(!!s.getChild(child1.name)); s.removeChild(child1); assert(!s.getChild(child1.name)); // Assert that adding a child updates its path properly s.addChild(child4); assert.strictEqual(child4.resource.fsPath, toResource('/path/to/stat/' + child4.name).fsPath); }); test('Move', () => { const d = new Date().getTime(); const s1 = createStat('/', '/', true, false, 8096, d); const s2 = createStat('/path', 'path', true, false, 8096, d); const s3 = createStat('/path/to', 'to', true, false, 8096, d); const s4 = createStat('/path/to/stat', 'stat', false, false, 8096, d); s1.addChild(s2); s2.addChild(s3); s3.addChild(s4); s4.move(s1); // Assert the new path of the moved element assert.strictEqual(s4.resource.fsPath, toResource('/' + s4.name).fsPath); // Move a subtree with children const leaf = createStat('/leaf', 'leaf', true, false, 8096, d); const leafC1 = createStat('/leaf/folder', 'folder', true, false, 8096, d); const leafCC2 = createStat('/leaf/folder/index.html', 'index.html', true, false, 8096, d); leaf.addChild(leafC1); leafC1.addChild(leafCC2); s1.addChild(leaf); leafC1.move(s3); assert.strictEqual(leafC1.resource.fsPath, URI.file(s3.resource.fsPath + '/' + leafC1.name).fsPath); assert.strictEqual(leafCC2.resource.fsPath, URI.file(leafC1.resource.fsPath + '/' + leafCC2.name).fsPath); }); test('Rename', () => { const d = new Date().getTime(); const s1 = createStat('/', '/', true, false, 8096, d); const s2 = createStat('/path', 'path', true, false, 8096, d); const s3 = 
createStat('/path/to', 'to', true, false, 8096, d); const s4 = createStat('/path/to/stat', 'stat', true, false, 8096, d); s1.addChild(s2); s2.addChild(s3); s3.addChild(s4); assert.strictEqual(s1.getChild(s2.name), s2); const s2renamed = createStat('/otherpath', 'otherpath', true, true, 8096, d); s2.rename(s2renamed); assert.strictEqual(s1.getChild(s2.name), s2); // Verify the paths have changed including children assert.strictEqual(s2.name, s2renamed.name); assert.strictEqual(s2.resource.fsPath, s2renamed.resource.fsPath); assert.strictEqual(s3.resource.fsPath, toResource('/otherpath/to').fsPath); assert.strictEqual(s4.resource.fsPath, toResource('/otherpath/to/stat').fsPath); const s4renamed = createStat('/otherpath/to/statother.js', 'statother.js', true, false, 8096, d); s4.rename(s4renamed); assert.strictEqual(s3.getChild(s4.name), s4); assert.strictEqual(s4.name, s4renamed.name); assert.strictEqual(s4.resource.fsPath, s4renamed.resource.fsPath); }); test('Find', () => { const d = new Date().getTime(); const s1 = createStat('/', '/', true, false, 8096, d); const s2 = createStat('/path', 'path', true, false, 8096, d); const s3 = createStat('/path/to', 'to', true, false, 8096, d); const s4 = createStat('/path/to/stat', 'stat', true, false, 8096, d); const s4Upper = createStat('/path/to/STAT', 'stat', true, false, 8096, d); const child1 = createStat('/path/to/stat/foo', 'foo', true, false, 8096, d); const child2 = createStat('/path/to/stat/foo/bar.html', 'bar.html', false, false, 8096, d); s1.addChild(s2); s2.addChild(s3); s3.addChild(s4); s4.addChild(child1); child1.addChild(child2); assert.strictEqual(s1.find(child2.resource), child2); assert.strictEqual(s1.find(child1.resource), child1); assert.strictEqual(s1.find(s4.resource), s4); assert.strictEqual(s1.find(s3.resource), s3); assert.strictEqual(s1.find(s2.resource), s2); if (isLinux) { assert.ok(!s1.find(s4Upper.resource)); } else { assert.strictEqual(s1.find(s4Upper.resource), s4); } assert.strictEqual(s1.find(toResource('foobar')), null); assert.strictEqual(s1.find(toResource('/')), s1); assert.strictEqual(s1.find(toResource('')), s1); }); test('Find with mixed case', function () { const d = new Date().getTime(); const s1 = createStat('/', '/', true, false, 8096, d); const s2 = createStat('/path', 'path', true, false, 8096, d); const s3 = createStat('/path/to', 'to', true, false, 8096, d); const s4 = createStat('/path/to/stat', 'stat', true, false, 8096, d); const child1 = createStat('/path/to/stat/foo', 'foo', true, false, 8096, d); const child2 = createStat('/path/to/stat/foo/bar.html', 'bar.html', false, false, 8096, d); s1.addChild(s2); s2.addChild(s3); s3.addChild(s4); s4.addChild(child1); child1.addChild(child2); if (isLinux) { // linux is case sensitive assert.ok(!s1.find(toResource('/path/to/stat/Foo'))); assert.ok(!s1.find(toResource('/Path/to/stat/foo/bar.html'))); } else { assert.ok(s1.find(toResource('/path/to/stat/Foo'))); assert.ok(s1.find(toResource('/Path/to/stat/foo/bar.html'))); } }); test('Validate File Name (For Create)', function () { const d = new Date().getTime(); const s = createStat('/path/to/stat', 'sName', true, true, 8096, d); const sChild = createStat('/path/to/stat/alles.klar', 'alles.klar', true, true, 8096, d); s.addChild(sChild); assert(validateFileName(s, null!) 
!== null); assert(validateFileName(s, '') !== null); assert(validateFileName(s, ' ') !== null); assert(validateFileName(s, 'Read Me') === null, 'name containing space'); if (isWindows) { assert(validateFileName(s, 'foo:bar') !== null); assert(validateFileName(s, 'foo*bar') !== null); assert(validateFileName(s, 'foo?bar') !== null); assert(validateFileName(s, 'foo<bar') !== null); assert(validateFileName(s, 'foo>bar') !== null); assert(validateFileName(s, 'foo|bar') !== null); } assert(validateFileName(s, 'alles.klar') === null); assert(validateFileName(s, '.foo') === null); assert(validateFileName(s, 'foo.bar') === null); assert(validateFileName(s, 'foo') === null);<|fim▁hole|> const d = new Date().getTime(); const s = createStat('/path/to/stat', 'sName', true, true, 8096, d); const sChild = createStat('/path/to/stat/alles.klar', 'alles.klar', true, true, 8096, d); s.addChild(sChild); assert(validateFileName(s, 'alles.klar') === null); assert(validateFileName(s, 'Alles.klar') === null); assert(validateFileName(s, 'Alles.Klar') === null); assert(validateFileName(s, '.foo') === null); assert(validateFileName(s, 'foo.bar') === null); assert(validateFileName(s, 'foo') === null); }); test('Validate Multi-Path File Names', function () { const d = new Date().getTime(); const wsFolder = createStat('/', 'workspaceFolder', true, false, 8096, d); assert(validateFileName(wsFolder, 'foo/bar') === null); assert(validateFileName(wsFolder, 'foo\\bar') === null); assert(validateFileName(wsFolder, 'all/slashes/are/same') === null); assert(validateFileName(wsFolder, 'theres/one/different\\slash') === null); assert(validateFileName(wsFolder, '/slashAtBeginning') !== null); // attempting to add a child to a deeply nested file const s1 = createStat('/path', 'path', true, false, 8096, d); const s2 = createStat('/path/to', 'to', true, false, 8096, d); const s3 = createStat('/path/to/stat', 'stat', true, false, 8096, d); wsFolder.addChild(s1); s1.addChild(s2); s2.addChild(s3); const fileDeeplyNested = createStat('/path/to/stat/fileNested', 'fileNested', false, false, 8096, d); s3.addChild(fileDeeplyNested); assert(validateFileName(wsFolder, '/path/to/stat/fileNested/aChild') !== null); // detect if path already exists assert(validateFileName(wsFolder, '/path/to/stat/fileNested') !== null); assert(validateFileName(wsFolder, '/path/to/stat/') !== null); }); test('Merge Local with Disk', function () { const d = new Date().toUTCString(); const merge1 = new ExplorerItem(URI.file(join('C:\\', '/path/to')), undefined, true, false, false, 'to', Date.now(), d); const merge2 = new ExplorerItem(URI.file(join('C:\\', '/path/to')), undefined, true, false, false, 'to', Date.now(), new Date(0).toUTCString()); // Merge Properties ExplorerItem.mergeLocalWithDisk(merge2, merge1); assert.strictEqual(merge1.mtime, merge2.mtime); // Merge Child when isDirectoryResolved=false is a no-op merge2.addChild(new ExplorerItem(URI.file(join('C:\\', '/path/to/foo.html')), undefined, true, false, false, 'foo.html', Date.now(), d)); ExplorerItem.mergeLocalWithDisk(merge2, merge1); // Merge Child with isDirectoryResolved=true const child = new ExplorerItem(URI.file(join('C:\\', '/path/to/foo.html')), undefined, true, false, false, 'foo.html', Date.now(), d); merge2.removeChild(child); merge2.addChild(child); (<any>merge2)._isDirectoryResolved = true; ExplorerItem.mergeLocalWithDisk(merge2, merge1); assert.strictEqual(merge1.getChild('foo.html').name, 'foo.html'); assert.deepEqual(merge1.getChild('foo.html').parent, merge1, 'Check parent'); // 
Verify that merge does not replace existing children, but updates properties in that case const existingChild = merge1.getChild('foo.html'); ExplorerItem.mergeLocalWithDisk(merge2, merge1); assert.ok(existingChild === merge1.getChild(existingChild!.name)); }); });<|fim▁end|>
	});

	test('Validate File Name (For Rename)', function () {
<|file_name|>torque-compiler.cc<|end_file_name|><|fim▁begin|>// Copyright 2019 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "src/torque/torque-compiler.h" #include <fstream> #include "src/torque/declarable.h" #include "src/torque/declaration-visitor.h" #include "src/torque/global-context.h" #include "src/torque/implementation-visitor.h" #include "src/torque/torque-parser.h" #include "src/torque/type-oracle.h" namespace v8 { namespace internal { namespace torque { namespace { base::Optional<std::string> ReadFile(const std::string& path) { std::ifstream file_stream(path); if (!file_stream.good()) return base::nullopt; return std::string{std::istreambuf_iterator<char>(file_stream), std::istreambuf_iterator<char>()}; } void ReadAndParseTorqueFile(const std::string& path) { SourceId source_id = SourceFileMap::AddSource(path); CurrentSourceFile::Scope source_id_scope(source_id); // path might be either a normal file path or an encoded URI. auto maybe_content = ReadFile(SourceFileMap::AbsolutePath(source_id)); if (!maybe_content) { if (auto maybe_path = FileUriDecode(path)) { maybe_content = ReadFile(*maybe_path); } } if (!maybe_content) { Error("Cannot open file path/uri: ", path).Throw(); } ParseTorque(*maybe_content); } void CompileCurrentAst(TorqueCompilerOptions options) { GlobalContext::Scope global_context(std::move(CurrentAst::Get())); if (options.collect_language_server_data) { GlobalContext::SetCollectLanguageServerData(); } if (options.force_assert_statements) { GlobalContext::SetForceAssertStatements(); } TargetArchitecture::Scope target_architecture(options.force_32bit_output); TypeOracle::Scope type_oracle; // Two-step process of predeclaration + resolution allows to resolve type // declarations independent of the order they are given. PredeclarationVisitor::Predeclare(GlobalContext::ast()); PredeclarationVisitor::ResolvePredeclarations(); // Process other declarations. DeclarationVisitor::Visit(GlobalContext::ast()); // A class types' fields are resolved here, which allows two class fields to // mutually refer to each others. 
TypeOracle::FinalizeAggregateTypes(); std::string output_directory = options.output_directory; ImplementationVisitor implementation_visitor; implementation_visitor.SetDryRun(output_directory.length() == 0); implementation_visitor.GenerateInstanceTypes(output_directory); implementation_visitor.BeginCSAFiles(); implementation_visitor.VisitAllDeclarables(); ReportAllUnusedMacros(); implementation_visitor.GenerateBuiltinDefinitionsAndInterfaceDescriptors( output_directory); implementation_visitor.GenerateClassFieldOffsets(output_directory); implementation_visitor.GenerateBitFields(output_directory); implementation_visitor.GeneratePrintDefinitions(output_directory); implementation_visitor.GenerateClassDefinitions(output_directory); implementation_visitor.GenerateClassVerifiers(output_directory); implementation_visitor.GenerateClassDebugReaders(output_directory); implementation_visitor.GenerateEnumVerifiers(output_directory); implementation_visitor.GenerateBodyDescriptors(output_directory); implementation_visitor.GenerateExportedMacrosAssembler(output_directory); implementation_visitor.GenerateCSATypes(output_directory); implementation_visitor.EndCSAFiles(); implementation_visitor.GenerateImplementation(output_directory); if (GlobalContext::collect_language_server_data()) { LanguageServerData::SetGlobalContext(std::move(GlobalContext::Get())); LanguageServerData::SetTypeOracle(std::move(TypeOracle::Get())); } } } // namespace TorqueCompilerResult CompileTorque(const std::string& source, TorqueCompilerOptions options) { SourceFileMap::Scope source_map_scope(options.v8_root); CurrentSourceFile::Scope no_file_scope( SourceFileMap::AddSource("dummy-filename.tq")); CurrentAst::Scope ast_scope; TorqueMessages::Scope messages_scope; LanguageServerData::Scope server_data_scope; TorqueCompilerResult result; try { ParseTorque(source); CompileCurrentAst(options); } catch (TorqueAbortCompilation&) { // Do nothing. The relevant TorqueMessage is part of the // TorqueMessages contextual. } result.source_file_map = SourceFileMap::Get(); result.language_server_data = std::move(LanguageServerData::Get()); result.messages = std::move(TorqueMessages::Get()); return result; } TorqueCompilerResult CompileTorque(std::vector<std::string> files, TorqueCompilerOptions options) { SourceFileMap::Scope source_map_scope(options.v8_root); CurrentSourceFile::Scope unknown_source_file_scope(SourceId::Invalid()); CurrentAst::Scope ast_scope; TorqueMessages::Scope messages_scope; LanguageServerData::Scope server_data_scope; TorqueCompilerResult result; try { for (const auto& path : files) { ReadAndParseTorqueFile(path); } CompileCurrentAst(options); } catch (TorqueAbortCompilation&) { // Do nothing. The relevant TorqueMessage is part of the // TorqueMessages contextual. } result.source_file_map = SourceFileMap::Get(); result.language_server_data = std::move(LanguageServerData::Get());<|fim▁hole|> } // namespace torque } // namespace internal } // namespace v8<|fim▁end|>
result.messages = std::move(TorqueMessages::Get()); return result; }
<|file_name|>pluginregistry.cpp<|end_file_name|><|fim▁begin|>/* PostMonster, universal HTTP automation tool * Copyright (C) 2015 by Paul Artsishevsky <[email protected]> * * This file is part of PostMonster. * * PostMonster is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * PostMonster is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with PostMonster. If not, see <http://www.gnu.org/licenses/>. */ #include "pluginregistry.h" #include <QSettings> #include <QPluginLoader> #include <QDir> PluginRegistry &PluginRegistry::instance() { static PluginRegistry m_instance; return m_instance; } void PluginRegistry::loadPlugins(const PostMonster::APIFunctions &api) { QDir pluginsDir = QDir::current(); pluginsDir.cd("plugins"); QSettings settings; int size = settings.beginReadArray("Plugins"); // Load static plugins QHash<QObject *, QPair<QJsonObject, QPluginLoader *> > plugins; foreach (const QStaticPlugin &plugin, QPluginLoader::staticPlugins()) { QObject *pluginInstance = plugin.instance(); plugins[pluginInstance] = QPair<QJsonObject, QPluginLoader *>(plugin.metaData(), nullptr); } // Load dynamic plugins for (int i = 0; i < size; ++i) { settings.setArrayIndex(i); QPluginLoader *loader = new QPluginLoader(pluginsDir.absoluteFilePath( settings.value("filename").toString())); QObject *pluginInstance = loader->instance(); plugins[pluginInstance] = QPair<QJsonObject, QPluginLoader *>(loader->metaData(), loader); } // Activate plugins QHashIterator<QObject *, QPair<QJsonObject, QPluginLoader *>> i(plugins); while (i.hasNext()) { i.next(); QObject *instance = i.key(); QJsonObject metaData = i.value().first["MetaData"].toObject(); QPluginLoader *loader = i.value().second; PostMonster::ToolPluginInterface *tool = qobject_cast<PostMonster::ToolPluginInterface *>(instance); if (tool) { PluginData *pluginData = new PluginData; pluginData->type = PostMonster::Tool; pluginData->instance = tool; pluginData->loader = loader; pluginData->info = metaData; //TODO Check for existent plugins with the same id m_plugins[metaData["id"].toString()] = pluginData; m_info[tool] = &pluginData->info; // Add tool plugins to toolbar tool->load(api); emit toolPluginLoaded(instance); } } } PostMonster::ToolPluginInterface *PluginRegistry::tool(const QString &name) {<|fim▁hole|> if (m_plugins.contains(name) && m_plugins[name]->type == PostMonster::Tool) { return dynamic_cast<PostMonster::ToolPluginInterface *>(m_plugins[name]->instance); } return nullptr; } const QJsonObject &PluginRegistry::info(const PostMonster::PluginInterface *plugin) { return *m_info[plugin]; } const QList<PluginRegistry::PluginData *> PluginRegistry::plugins(PostMonster::PluginType type) { QList<PluginData *> result; foreach (PluginData *plugin, m_plugins.values()) { if (plugin->type & type) result << plugin; } return result; } PluginRegistry::~PluginRegistry() { qDeleteAll(m_plugins.values()); }<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 Colin Sherratt // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. extern crate time; extern crate glfw; extern crate "rustc-serialize" as rustc_serialize; extern crate nice_glfw; extern crate ovr; extern crate collect; extern crate libc; use std::sync::Arc; use std::sync::mpsc::Receiver; #[cfg(target_os="linux")] use libc::c_void; use glfw::{Glfw, Context, RenderContext}; use glfw::WindowMode::{Windowed, FullScreen}; use collect::TrieMap; pub use input::{ Button, Event, WindowEvent, EventGroup }; mod input; pub type WindowId = usize; struct WindowHandle { window: glfw::Window, forced_event: Option<glfw::WindowEvent>, receiver: Receiver<(f64, glfw::WindowEvent)>, title: String } pub struct IOManager { glfw: Glfw, ovr: Option<ovr::Ovr>, windows: TrieMap<WindowHandle>, window_id: usize } fn create_window_context(glfw :&mut Glfw, width: u32, height: u32, name: &str, mode: glfw::WindowMode) -> Option<(glfw::Window, Receiver<(f64, glfw::WindowEvent)>)> { nice_glfw::WindowBuilder::new(glfw) .try_modern_context_hints() .size(width, height) .title(name) .mode(mode) .create() } impl IOManager { pub fn new(glfw: glfw::Glfw) -> IOManager { IOManager { glfw: glfw, ovr: None, windows: TrieMap::new(), window_id: 0 } } fn add_window(&mut self, window: glfw::Window, recv: Receiver<(f64, glfw::WindowEvent)>) -> InputHandle { let id = self.window_id; self.window_id += 1; let (w, h) = window.get_framebuffer_size(); self.windows.insert(id, { WindowHandle { window: window, forced_event: Some(glfw::WindowEvent::FramebufferSize(w, h)), receiver: recv, title: "snowmew".to_string() } }); InputHandle{ handle: id } } pub fn window(&mut self, size: (u32, u32)) -> Option<Window> { let (width, height) = size; let win_opt = create_window_context(&mut self.glfw, width, height, "Snowmew", Windowed); let (mut window, events) = match win_opt { Some((window, events)) => (window, events), None => return None }; self.glfw.set_swap_interval(1); window.set_all_polling(true); window.show(); let version = window.get_context_version(); let rc = window.render_context(); let handle = self.add_window(window, events); Some(Window { handle: handle, render: rc, version: (version.major, version.minor), hmd: None, os_spec: WindowOSSpec::new(&self.glfw) }) } pub fn primary(&mut self, size: (u32, u32)) -> Option<Window> { let screen = { self.glfw.with_primary_monitor(|glfw, display| { let display = display.unwrap(); let (width, height) = size; create_window_context(glfw, width, height, "Snowmew FullScreen", FullScreen(display)) }) }; match screen { None => None, Some((mut window, events)) => { window.set_all_polling(true); window.show(); let version = window.get_context_version(); let rc = window.render_context(); let handle = self.add_window(window, events); Some(Window { handle: handle, render: rc, version: (version.major, version.minor), hmd: None, os_spec: WindowOSSpec::new(&self.glfw) }) } } } pub fn get_primary_resolution(&mut self) -> (u32, u32) { 
self.glfw.with_primary_monitor(|_, display| { let display = display.expect("Could not get primnay display"); let vm = display.get_video_mode().expect("Could not get video mode"); (vm.width, vm.height) }) } pub fn get_primary_position(&mut self) -> (i32, i32) { self.glfw.with_primary_monitor(|_, display| { let display = display.expect("Could not get primnay display"); display.get_pos() }) } #[cfg(target_os="linux")] fn create_hmd_window(&mut self, hmd: &ovr::HmdDescription) -> Option<(glfw::Window, Receiver<(f64, glfw::WindowEvent)>)> { let window = self.glfw.with_connected_monitors(|glfw, monitors| { for m in monitors.iter() { let (x, y) = m.get_pos(); if x == hmd.window_position.x && y == hmd.window_position.y { let (width, height) = (hmd.resolution.x, hmd.resolution.y); let win_opt = create_window_context(glfw, width as u32, height as u32, "Snowmew FullScreen", FullScreen(m)); let (window, events) = match win_opt { Some((window, events)) => (window, events), None => return None }; return Some((window, events)); } } None }); if window.is_none() { // fallback if we could not guess at the screen let (width, height) = (hmd.resolution.x, hmd.resolution.y); let win_opt = self.glfw.create_window(width as u32, height as u32, "Snowmew", Windowed); let (mut window, events) = match win_opt { Some((window, events)) => (window, events), None => return None }; // move viewport let (dx, dy) = (hmd.window_position.x, hmd.window_position.y); window.set_pos(dx as i32, dy as i32); Some((window, events)) } else { window } } #[cfg(target_os="macos")] fn create_hmd_window(&mut self, hmd: &ovr::HmdDescription) -> Option<(glfw::Window, Receiver<(f64, glfw::WindowEvent)>)> { self.glfw.with_connected_monitors(|glfw, monitors| { for m in monitors.iter() { if !m.get_name().contains("Rift") { continue; } let (width, height) = (hmd.resolution.x, hmd.resolution.y); let win_opt = create_window_context(glfw, width as u32, height as u32, "Snowmew FullScreen", FullScreen(m)); let (window, events) = match win_opt { Some((window, events)) => (window, events), None => return None }; <|fim▁hole|> } None }) } pub fn hmd(&mut self) -> Option<Window> { if !self.setup_ovr() { return None; } let (window, events, rc, hmd) = { let hmd = match self.ovr.as_ref().unwrap().first_hmd() { Some(hmd) => hmd, None => return None }; let hmdinfo = hmd.get_description(); let (mut window, events) = match self.create_hmd_window(&hmdinfo) { Some((window, events)) => (window, events), None => return None }; window.set_all_polling(true); window.show(); let rc = window.render_context(); (window, events, rc, hmd) }; let version = window.get_context_version(); let handle = self.add_window(window, events); Some(Window { handle: handle, render: rc, version: (version.major, version.minor), hmd: Some(Arc::new(hmd)), os_spec: WindowOSSpec::new(&self.glfw) }) } pub fn wait(&mut self) { self.glfw.wait_events(); } pub fn poll(&mut self) { self.glfw.poll_events(); } pub fn next_event(&mut self, handle: &InputHandle) -> input::EventGroup { let evt = self.windows.get_mut(&handle.handle) .map(|rx| { // this is a hack to inject the correct size into the event buffer match rx.forced_event.take() { Some(evt) => return input::Event::from_glfw(evt), None => () }; for (_, evt) in glfw::flush_messages(&rx.receiver) { let evt = input::Event::from_glfw(evt); if evt != input::EventGroup::Nop { return evt; } } input::EventGroup::Nop }); match evt { Some(e) => e, _ => input::EventGroup::Nop } } pub fn should_close(&mut self, handle: &InputHandle) -> bool { let should_close = 
self.windows.get_mut(&handle.handle) .map(|win| win.window.should_close()); if let Some(x) = should_close { x } else { true } } pub fn set_title(&mut self, handle: &InputHandle, title: String) { self.windows.get_mut(&handle.handle) .map(|win| { if title != win.title { win.window.set_title(&title); win.title = title.clone(); } }); } fn setup_ovr(&mut self) -> bool { if self.ovr.is_some() && self.ovr.as_ref().unwrap().detect() > 0 { return true; } if self.ovr.is_none() { self.ovr = ovr::Ovr::init(); } self.ovr.is_some() && self.ovr.as_ref().unwrap().detect() > 0 } pub fn set_window_position(&mut self, window: &Window, pos: (i32, i32)) { let (w, h) = pos; match self.windows.get_mut(&window.handle.handle) { Some(win) => win.window.set_pos(w, h), None => () } } pub fn get_framebuffer_size(&mut self, window: &Window) -> (i32, i32) { match self.windows.get_mut(&window.handle.handle) { Some(win) => win.window.get_framebuffer_size(), None => (0, 0) } } pub fn get_proc_address(&self, name: &str) -> *const ::libc::c_void { self.glfw.get_proc_address_raw(name) } } #[derive(Clone, Copy)] pub struct InputHandle { handle: usize, } #[cfg(target_os="macos")] struct WindowOSSpec; #[cfg(target_os="macos")] impl WindowOSSpec { fn new(_: &Glfw) -> WindowOSSpec {WindowOSSpec} } #[cfg(target_os="linux")] struct WindowOSSpec { display: *mut c_void } #[cfg(target_os="linux")] impl WindowOSSpec { fn new(glfw: &Glfw) -> WindowOSSpec { WindowOSSpec { display: glfw.get_x11_display() } } } unsafe impl Send for WindowOSSpec {} pub struct Window { handle: InputHandle, render: RenderContext, version: (u64, u64), hmd: Option<Arc<ovr::Hmd>>, os_spec: WindowOSSpec } impl Window { pub fn swap_buffers(&mut self) { self.render.swap_buffers() } pub fn make_context_current(&mut self) { self.render.make_current() } pub fn get_context_version(&self) -> (u64, u64) { self.version } pub fn handle(&self) -> InputHandle { self.handle.clone() } pub fn is_hmd(&self) -> bool { self.hmd.is_some() } pub fn get_hmd<'a>(&'a self) -> Arc<ovr::Hmd> { self.hmd.as_ref().expect("no hmd device found!").clone() } /// Wrapper for `glfwGetGLXContext` #[cfg(target_os="linux")] pub fn get_x11_display(&self) -> *mut c_void { self.os_spec.display } } #[derive(Clone, RustcEncodable, RustcDecodable)] pub struct IoState { pub render_size: (u32, u32), pub size: (u32, u32), pub position: (i32, i32), pub show_mouse: bool, pub mouse_over: bool, pub window_title: String } impl IoState { pub fn new() -> IoState { IoState { render_size: (800, 600), size: (800, 600), position: (0, 0), show_mouse: true, mouse_over: false, window_title: "snowmew".to_string() } } pub fn window_action(&mut self, win: input::WindowEvent) { match win { input::WindowEvent::Size(x, y) => { self.size = (x, y); } input::WindowEvent::Position(x, y) => { self.position = (x, y); } input::WindowEvent::MouseOver(mouse) => { self.mouse_over = mouse; } } } } pub trait GetIoState { /// Apply an `WindowEvent` to the system, this will update /// the io metadata (io_state) fn window_action(&mut self, evt: input::WindowEvent) { self.get_io_state_mut().window_action(evt); } /// Read the io metadata fn get_io_state(&self) -> &IoState; /// write to the io metadata fn get_io_state_mut(&mut self) -> &mut IoState; } #[derive(Copy)] /// Used to configure how a window should be created for the game pub struct DisplayConfig { /// The resolution in pixels (width, height) /// if not set the engine will do a best guess pub resolution: Option<(u32, u32)>, /// The position of the window, if not set the window /// will 
be placed at the best guess for the engine pub position: Option<(i32, i32)>, /// Enable HMD for Oculus Rift support, Only supported by the AZDO backend pub hmd: bool, /// Should the window be created as a window instead of fullscreen. pub window: bool, } impl DisplayConfig { pub fn create_display(&self, im: &mut IOManager) -> Option<Window> { let window = if self.hmd { im.hmd() } else { None }; if window.is_some() { return window; } let resolution = match self.resolution { Some(res) => res, None => im.get_primary_resolution() }; let position = match self.position { Some(pos) => pos, None => im.get_primary_position() }; if !self.window { im.primary(resolution) } else { let win = im.window(resolution); match win { Some(win) => { im.set_window_position(&win, position); Some(win) } None => None } } } }<|fim▁end|>
return Some((window, events));
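A pattern worth noting in the snowmew lib.rs row is "pick the monitor that matches the HMD, otherwise fall back to a movable window": create_hmd_window searches connected monitors for one at the Rift's reported position (or with "Rift" in its name on macOS) and, failing that, opens a windowed context and moves it to the HMD coordinates. A small Rust sketch of that selection logic using invented plain-data types instead of the real GLFW/OVR handles:

// Illustrative stand-ins for the GLFW/OVR types used by the real code.
#[derive(Debug)]
struct Monitor { name: String, pos: (i32, i32) }

#[derive(Debug)]
struct HmdInfo { window_position: (i32, i32), resolution: (u32, u32) }

#[derive(Debug)]
enum WindowPlan {
    Fullscreen { monitor: String, size: (u32, u32) },
    // Fallback: windowed, but moved onto the HMD's screen coordinates.
    WindowedAt { pos: (i32, i32), size: (u32, u32) },
}

fn plan_hmd_window(hmd: &HmdInfo, monitors: &[Monitor]) -> WindowPlan {
    // Prefer the monitor whose position matches what the HMD reports.
    if let Some(m) = monitors.iter().find(|m| m.pos == hmd.window_position) {
        return WindowPlan::Fullscreen { monitor: m.name.clone(), size: hmd.resolution };
    }
    // Could not guess the screen: fall back to a movable window at that position.
    WindowPlan::WindowedAt { pos: hmd.window_position, size: hmd.resolution }
}

fn main() {
    let hmd = HmdInfo { window_position: (1920, 0), resolution: (1920, 1080) };
    let monitors = vec![Monitor { name: "Rift DK2".into(), pos: (1920, 0) }];
    println!("{:?}", plan_hmd_window(&hmd, &monitors));
    println!("{:?}", plan_hmd_window(&hmd, &[]));
}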
<|file_name|>test.js<|end_file_name|><|fim▁begin|>const vboxm = require('./vboxm.js'); //vboxm.clone('8a07a800-4bef-4cc7-9581-6d03a2fab45f', 'vboxm.cloneテスト'); // vboxm.delete('db7cd51b-ac5b-4915-859e-20b88bcf68c2'); <|fim▁hole|>console.log(regResult);<|fim▁end|>
let name = 'aiueo,kakikukeko???kdfajlsd'; let regResult = name.match(/^[a-zA-Z0-9!\(\)-=^~\\|@`\[{;+:*\]},<.>/?\_ ]+$/);
<|file_name|>lexer.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{errors::*, parser::syntax::make_loc, FileCommentMap, MatchedFileCommentMap}; use codespan::{ByteIndex, Span}; use move_ir_types::location::Loc; use std::{collections::BTreeMap, fmt}; #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum Tok { EOF, AddressValue, NumValue, U8Value, U64Value, U128Value, ByteStringValue, IdentifierValue, Exclaim, ExclaimEqual, Percent, Amp, AmpAmp, AmpMut, LParen, RParen, LBracket, RBracket, Star, Plus, Comma, Minus, Period, PeriodPeriod, Slash, Colon, ColonColon, Semicolon, Less, LessEqual, LessLess, Equal, EqualEqual, EqualEqualGreater, Greater, GreaterEqual, GreaterGreater, Caret, Abort, Acquires, As, Break, Continue, Copy, Copyable, Define, Else, False, If, Invariant, Let, Loop, Module, Move, Native, Public, Resource, Return, Spec, Struct, True, Use, While, LBrace, Pipe, PipePipe, RBrace, Fun, Script, Const, Friend, } impl fmt::Display for Tok { fn fmt<'f>(&self, formatter: &mut fmt::Formatter<'f>) -> Result<(), fmt::Error> { use Tok::*; let s = match *self { EOF => "[end-of-file]", AddressValue => "[Address]", NumValue => "[Num]", U8Value => "[U8]", U64Value => "[U64]", U128Value => "[U128]", ByteStringValue => "[ByteString]", IdentifierValue => "[Identifier]", Exclaim => "!", ExclaimEqual => "!=", Percent => "%", Amp => "&", AmpAmp => "&&", AmpMut => "&mut", LParen => "(", RParen => ")", LBracket => "[", RBracket => "]", Star => "*", Plus => "+", Comma => ",", Minus => "-", Period => ".", PeriodPeriod => "..", Slash => "/", Colon => ":", ColonColon => "::", Semicolon => ";", Less => "<", LessEqual => "<=", LessLess => "<<", Equal => "=", EqualEqual => "==", EqualEqualGreater => "==>", Greater => ">", GreaterEqual => ">=", GreaterGreater => ">>", Caret => "^", Abort => "abort", Acquires => "acquires", As => "as", Break => "break", Continue => "continue", Copy => "copy", Copyable => "copyable", Define => "define", Else => "else", False => "false", If => "if", Invariant => "invariant", Let => "let", Loop => "loop", Module => "module", Move => "move", Native => "native", Public => "public", Resource => "resource", Return => "return", Spec => "spec", Struct => "struct", True => "true", Use => "use", While => "while", LBrace => "{", Pipe => "|", PipePipe => "||", RBrace => "}", Fun => "fun", Script => "script", Const => "const", Friend => "friend", }; fmt::Display::fmt(s, formatter) } } pub struct Lexer<'input> { text: &'input str, file: &'static str, doc_comments: FileCommentMap, matched_doc_comments: MatchedFileCommentMap, prev_end: usize, cur_start: usize, cur_end: usize, token: Tok, } impl<'input> Lexer<'input> { pub fn new( text: &'input str, file: &'static str, doc_comments: BTreeMap<Span, String>, ) -> Lexer<'input> { Lexer { text, file, doc_comments, matched_doc_comments: BTreeMap::new(), prev_end: 0, cur_start: 0, cur_end: 0, token: Tok::EOF, } } pub fn peek(&self) -> Tok { self.token } pub fn content(&self) -> &str { &self.text[self.cur_start..self.cur_end] } pub fn file_name(&self) -> &'static str { self.file } pub fn start_loc(&self) -> usize { self.cur_start } pub fn previous_end_loc(&self) -> usize { self.prev_end } // Look ahead to the next token after the current one and return it without advancing // the state of the lexer. 
pub fn lookahead(&self) -> Result<Tok, Error> { let text = self.text[self.cur_end..].trim_start(); let offset = self.text.len() - text.len(); let (tok, _) = find_token(self.file, text, offset)?; Ok(tok) } // Look ahead to the next two tokens after the current one and return them without advancing // the state of the lexer. pub fn lookahead2(&self) -> Result<(Tok, Tok), Error> { let text = self.text[self.cur_end..].trim_start(); let offset = self.text.len() - text.len(); let (first, length) = find_token(self.file, text, offset)?; let text2 = self.text[offset + length..].trim_start(); let offset2 = self.text.len() - text2.len(); let (second, _) = find_token(self.file, text2, offset2)?; Ok((first, second)) } // Matches the doc comments after the last token (or the beginning of the file) to the position // of the current token. This moves the comments out of `doc_comments` and // into `matched_doc_comments`. At the end of parsing, if `doc_comments` is not empty, errors // for stale doc comments will be produced. // // Calling this function during parsing effectively marks a valid point for documentation // comments. The documentation comments are not stored in the AST, but can be retrieved by // using the start position of an item as an index into `matched_doc_comments`. pub fn match_doc_comments(&mut self) { let start = self.previous_end_loc() as u32; let end = self.cur_start as u32; let mut matched = vec![]; let merged = self .doc_comments .range(Span::new(start, start)..Span::new(end, end)) .map(|(span, s)| { matched.push(*span); s.clone() }) .collect::<Vec<String>>() .join("\n"); for span in matched { self.doc_comments.remove(&span); } self.matched_doc_comments.insert(ByteIndex(end), merged); } // At the end of parsing, checks whether there are any unmatched documentation comments, // producing errors if so. Otherwise returns a map from file position to associated // documentation. pub fn check_and_get_doc_comments(&mut self) -> Result<MatchedFileCommentMap, Errors> { let errors = self .doc_comments .iter() .map(|(span, _)| { vec![( Loc::new(self.file, *span), "documentation comment cannot be matched to a language item".to_string(), )] }) .collect::<Errors>(); if errors.is_empty() { Ok(std::mem::take(&mut self.matched_doc_comments)) } else { Err(errors) } } pub fn advance(&mut self) -> Result<(), Error> { self.prev_end = self.cur_end; let text = self.text[self.cur_end..].trim_start(); self.cur_start = self.text.len() - text.len(); let (token, len) = find_token(self.file, text, self.cur_start)?; self.cur_end = self.cur_start + len; self.token = token; Ok(()) } // Replace the current token. The lexer will always match the longest token, // but sometimes the parser will prefer to replace it with a shorter one, // e.g., ">" instead of ">>". pub fn replace_token(&mut self, token: Tok, len: usize) { self.token = token; self.cur_end = self.cur_start + len } } // Find the next token and its length without changing the state of the lexer. fn find_token(file: &'static str, text: &str, start_offset: usize) -> Result<(Tok, usize), Error> { let c: char = match text.chars().next() { Some(next_char) => next_char, None => { return Ok((Tok::EOF, 0)); } }; let (tok, len) = match c { '0'..='9' => { if text.starts_with("0x") && text.len() > 2 { let hex_len = get_hex_digits_len(&text[2..]); if hex_len == 0 { // Fall back to treating this as a "0" token. 
(Tok::NumValue, 1) } else { (Tok::AddressValue, 2 + hex_len) } } else { get_decimal_number(&text) } } 'A'..='Z' | 'a'..='z' | '_' => {<|fim▁hole|> if text.starts_with("x\"") || text.starts_with("b\"") { let line = &text.lines().next().unwrap()[2..]; match get_string_len(line) { Some(last_quote) => (Tok::ByteStringValue, 2 + last_quote + 1), None => { return Err(vec![( make_loc(file, start_offset, start_offset + line.len() + 2), "Missing closing quote (\") after byte string".to_string(), )]) } } } else { let len = get_name_len(&text); (get_name_token(&text[..len]), len) } } '&' => { if text.starts_with("&mut ") { (Tok::AmpMut, 5) } else if text.starts_with("&&") { (Tok::AmpAmp, 2) } else { (Tok::Amp, 1) } } '|' => { if text.starts_with("||") { (Tok::PipePipe, 2) } else { (Tok::Pipe, 1) } } '=' => { if text.starts_with("==>") { (Tok::EqualEqualGreater, 3) } else if text.starts_with("==") { (Tok::EqualEqual, 2) } else { (Tok::Equal, 1) } } '!' => { if text.starts_with("!=") { (Tok::ExclaimEqual, 2) } else { (Tok::Exclaim, 1) } } '<' => { if text.starts_with("<=") { (Tok::LessEqual, 2) } else if text.starts_with("<<") { (Tok::LessLess, 2) } else { (Tok::Less, 1) } } '>' => { if text.starts_with(">=") { (Tok::GreaterEqual, 2) } else if text.starts_with(">>") { (Tok::GreaterGreater, 2) } else { (Tok::Greater, 1) } } ':' => { if text.starts_with("::") { (Tok::ColonColon, 2) } else { (Tok::Colon, 1) } } '%' => (Tok::Percent, 1), '(' => (Tok::LParen, 1), ')' => (Tok::RParen, 1), '[' => (Tok::LBracket, 1), ']' => (Tok::RBracket, 1), '*' => (Tok::Star, 1), '+' => (Tok::Plus, 1), ',' => (Tok::Comma, 1), '-' => (Tok::Minus, 1), '.' => { if text.starts_with("..") { (Tok::PeriodPeriod, 2) } else { (Tok::Period, 1) } } '/' => (Tok::Slash, 1), ';' => (Tok::Semicolon, 1), '^' => (Tok::Caret, 1), '{' => (Tok::LBrace, 1), '}' => (Tok::RBrace, 1), _ => { let loc = make_loc(file, start_offset, start_offset); return Err(vec![(loc, format!("Invalid character: '{}'", c))]); } }; Ok((tok, len)) } // Return the length of the substring matching [a-zA-Z0-9_]. Note that // this does not do any special check for whether the first character // starts with a number, so the caller is responsible for any additional // checks on the first character. fn get_name_len(text: &str) -> usize { text.chars() .position(|c| !matches!(c, 'a'..='z' | 'A'..='Z' | '_' | '0'..='9')) .unwrap_or_else(|| text.len()) } fn get_decimal_number(text: &str) -> (Tok, usize) { let len = text .chars() .position(|c| !matches!(c, '0'..='9')) .unwrap_or_else(|| text.len()); let rest = &text[len..]; if rest.starts_with("u8") { (Tok::U8Value, len + 2) } else if rest.starts_with("u64") { (Tok::U64Value, len + 3) } else if rest.starts_with("u128") { (Tok::U128Value, len + 4) } else { (Tok::NumValue, len) } } // Return the length of the substring containing characters in [0-9a-fA-F]. fn get_hex_digits_len(text: &str) -> usize { text.find(|c| !matches!(c, 'a'..='f' | 'A'..='F' | '0'..='9')) .unwrap_or_else(|| text.len()) } // Return the length of the quoted string, or None if there is no closing quote. 
fn get_string_len(text: &str) -> Option<usize> { let mut pos = 0; let mut iter = text.chars(); while let Some(chr) = iter.next() { if chr == '\\' { // Skip over the escaped character (e.g., a quote or another backslash) if iter.next().is_some() { pos += 1; } } else if chr == '"' { return Some(pos); } pos += 1; } None } fn get_name_token(name: &str) -> Tok { match name { "abort" => Tok::Abort, "acquires" => Tok::Acquires, "as" => Tok::As, "break" => Tok::Break, "const" => Tok::Const, "continue" => Tok::Continue, "copy" => Tok::Copy, "copyable" => Tok::Copyable, "define" => Tok::Define, "else" => Tok::Else, "false" => Tok::False, "fun" => Tok::Fun, "friend" => Tok::Friend, "if" => Tok::If, "invariant" => Tok::Invariant, "let" => Tok::Let, "loop" => Tok::Loop, "module" => Tok::Module, "move" => Tok::Move, "native" => Tok::Native, "public" => Tok::Public, "resource" => Tok::Resource, "return" => Tok::Return, "script" => Tok::Script, "spec" => Tok::Spec, "struct" => Tok::Struct, "true" => Tok::True, "use" => Tok::Use, "while" => Tok::While, _ => Tok::IdentifierValue, } }<|fim▁end|>
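Two comments in the lexer.rs row deserve emphasis: find_token always matches the longest spelling (so ">>" and ">=" win over ">"), and the parser can later call replace_token to shrink the current token, for example turning ">>" back into ">" when closing nested type arguments, while lookahead simply scans the untouched remainder of the text so the lexer state never advances. A tiny self-contained Rust sketch of that longest-match rule for the ">" family; it is a simplification, not the Diem implementation:

// Minimal longest-match scan for '>', '>=', '>>' (mirrors find_token's ordering:
// test the longer spellings before falling back to the single character).
#[derive(Debug, PartialEq)]
enum Tok { Greater, GreaterEqual, GreaterGreater }

fn find_gt_token(text: &str) -> Option<(Tok, usize)> {
    if text.starts_with(">=") {
        Some((Tok::GreaterEqual, 2))
    } else if text.starts_with(">>") {
        Some((Tok::GreaterGreater, 2))
    } else if text.starts_with('>') {
        Some((Tok::Greater, 1))
    } else {
        None
    }
}

fn main() {
    assert_eq!(find_gt_token(">> x"), Some((Tok::GreaterGreater, 2)));
    assert_eq!(find_gt_token(">= 1"), Some((Tok::GreaterEqual, 2)));
    // A parser closing two nested type argument lists would take the '>>' above
    // and shrink it to a single '>' of length 1, exactly what replace_token does.
    assert_eq!(find_gt_token("> other"), Some((Tok::Greater, 1)));
}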
<|file_name|>output_dependencies.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # import csv, sys, os.path def error(message): sys.stderr.write(message + '\n') sys.exit(1) def cygpath_from_winpath(winabspath): return("/cygdrive/"+winabspath[0].lower()+winabspath[2:].replace('\\','/')) def main(argv): if len(argv)!=(3+1) and len(argv)!=(4+1): error('usage: %s root_WPID procmon_log.csv proj_base_path [dep_file]' % os.path.basename(argv[0])) remove_deleted_files=1 proc_set = [argv[1]] log_fname = argv[2] proj_base = argv[3] if proj_base.endswith('\\'): proj_base=proj_base[:-1] # don't use set because I want a consistent order. init_read_files = [] ever_write_files = [] # read the string data with open(log_fname, 'r') as f: reader = csv.DictReader(f, delimiter=',') for line in reader: #create the process tree (start in Detail (PPID) column) if line["Operation"].startswith('Process Start') and line["Parent PID"] in proc_set: proc_set.append(line["PID"]) continue #Then filter events by the WPID tree. if line["Operation"].startswith('ReadFile') and line["PID"] in proc_set and line["Path"].startswith(proj_base): #fname = line["Path"][(len(proj_base)+1):] fname = line["Path"] if fname not in ever_write_files and fname not in init_read_files: init_read_files.append(fname) if line["Operation"].startswith('WriteFile') and line["PID"] in proc_set and line["Path"].startswith(proj_base): #fname = line["Path"][(len(proj_base)+1):] fname = line["Path"] if fname not in ever_write_files: ever_write_files.append(fname) if remove_deleted_files: init_read_files = [fname for fname in init_read_files if os.path.isfile(fname)] ever_write_files = [fname for fname in ever_write_files if os.path.isfile(fname)] #Output the information if len(argv)==(4+1): outfile = argv[4] with open(outfile, "w") as dep_file: for fname in ever_write_files: fname_rel = os.path.relpath(cygpath_from_winpath(fname)) dep_file.write(fname_rel + ' ') dep_file.write(' : ')<|fim▁hole|> for fname in init_read_files: fname_rel = os.path.relpath(cygpath_from_winpath(fname)) dep_file.write(' ' + fname_rel) dep_file.write('\n') else: print 'Project files initially read:' for fname in init_read_files: fname_rel = os.path.relpath(fname) print fname_rel print '' print 'Project files ever written:' for fname in ever_write_files: fname_rel = os.path.relpath(fname) print fname_rel if __name__ =='__main__': main(sys.argv)<|fim▁end|>
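The output_dependencies.py row reduces a Process Monitor CSV to one make-style rule: files the traced process tree wrote become targets, files it read before writing become prerequisites, and Windows paths are rewritten into /cygdrive form. A hedged Rust sketch of just the path rewrite and rule emission (the real script also filters events by the PID tree and uses relative paths, which is omitted here):

// Rewrite "C:\proj\a.txt" -> "/cygdrive/c/proj/a.txt", like cygpath_from_winpath.
fn cygpath_from_winpath(win: &str) -> String {
    let drive = win[..1].to_lowercase();
    let rest = win[2..].replace('\\', "/"); // skip the "X:" prefix, flip separators
    format!("/cygdrive/{}{}", drive, rest)
}

// Emit a single "targets : prerequisites" dependency line.
fn dependency_rule(written: &[&str], read: &[&str]) -> String {
    let targets: Vec<String> = written.iter().map(|p| cygpath_from_winpath(p)).collect();
    let prereqs: Vec<String> = read.iter().map(|p| cygpath_from_winpath(p)).collect();
    format!("{} : {}", targets.join(" "), prereqs.join(" "))
}

fn main() {
    let rule = dependency_rule(
        &[r"C:\proj\out\build.o"],
        &[r"C:\proj\src\main.c", r"C:\proj\include\api.h"],
    );
    assert_eq!(
        rule,
        "/cygdrive/c/proj/out/build.o : /cygdrive/c/proj/src/main.c /cygdrive/c/proj/include/api.h"
    );
    println!("{}", rule);
}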
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # partpy documentation build configuration file, created by # sphinx-quickstart on Sat Feb 16 18:56:06 2013. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.doctest'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'partpy' copyright = u'2013, Taylor "Nekroze" Lawson' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.2' # The full version, including alpha/beta/rc tags. release = '1.2.4' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. 
If None, it defaults to # "<project> v<release> documentation".<|fim▁hole|>#html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'partpydoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'partpy.tex', u'partpy Documentation', u'Taylor "Nekroze" Lawson', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ ('index', 'partpy', u'partpy Documentation', [u'Taylor "Nekroze" Lawson'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'partpy', u'partpy Documentation', u'Taylor "Nekroze" Lawson', 'partpy', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'partpy' epub_author = u'Taylor "Nekroze" Lawson' epub_publisher = u'Taylor "Nekroze" Lawson' epub_copyright = u'2013, Taylor "Nekroze" Lawson' # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. #epub_exclude_files = [] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import logging from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext as _ from vkontakte_api.models import VkontakteManager, VkontaktePKModel from .mixins import ParseGroupsMixin, PhotableModelMixin, UserableModelMixin, VideoableModelMixin log = logging.getLogger('vkontakte_groups') GROUP_TYPE_CHOICES = ( ('group', u'Группа'), ('page', u'Страница'), ('event', u'Событие'), ) class CheckMembersCountFailed(Exception): pass class GroupRemoteManager(VkontakteManager): def api_call(self, *args, **kwargs): if 'ids' in kwargs: kwargs['group_ids'] = ','.join(map(lambda i: str(i), kwargs.pop('ids'))) return super(GroupRemoteManager, self).api_call(*args, **kwargs) def search(self, q, offset=None, count=None): kwargs = {'q': q} if offset: kwargs.update(offset=offset) if count: kwargs.update(count=count) return self.get(method='search', **kwargs) def fetch(self, *args, **kwargs): """ Add additional fields to parent fetch request """ if 'fields' not in kwargs: kwargs['fields'] = 'members_count' return super(GroupRemoteManager, self).fetch(*args, **kwargs) def get_members_ids(self, group, check_count=True, **kwargs): ids = set() attempts = 0 kwargs['offset'] = 0 kwargs['group_id'] = group.remote_id while True: response = self.api_call('get_members', **kwargs) ids_iteration = response.get('items', []) for user_id in ids_iteration: ids.add(int(user_id)) ids_iteration_count = len(ids_iteration) ids_count = len(ids) log.debug('Get members of group %s. Got %s, total %s, actual ammount %s, offset %s' % ( group, ids_iteration_count, ids_count, group.members_count, kwargs['offset'])) if ids_iteration_count != 0: attempts = 0 kwargs['offset'] += ids_iteration_count else: try: if check_count: self.check_members_count(group, ids_count) break except CheckMembersCountFailed as e: attempts += 1 if attempts <= 5: log.warning('%s, offset %s, attempts %s' % (e, kwargs['offset'], attempts)) continue else: log.error(e) raise return list(ids) def check_members_count(self, group, count): if group.members_count and count > 0: division = float(group.members_count) / count if 0.99 > division or 1.01 < division: raise CheckMembersCountFailed("Suspicious ammount of members fetched for group %s. 
" "Actual ammount is %d, fetched %d, division is %s" % ( group, group.members_count, count, division)) @python_2_unicode_compatible class Group(PhotableModelMixin, VideoableModelMixin, UserableModelMixin, VkontaktePKModel): resolve_screen_name_types = ['group', 'page', 'event'] slug_prefix = 'club' name = models.CharField(max_length=800) screen_name = models.CharField(u'Короткое имя группы', max_length=50, db_index=True) is_closed = models.NullBooleanField(u'Флаг закрытой группы') is_admin = models.NullBooleanField(u'Пользователь является администратором') members_count = models.IntegerField(u'Всего участников', null=True) verified = models.NullBooleanField(u'Флаг официальной группы') type = models.CharField(u'Тип объекта', max_length=10, choices=GROUP_TYPE_CHOICES) photo = models.URLField() photo_big = models.URLField() photo_medium = models.URLField() remote = GroupRemoteManager(remote_pk=('remote_id',), methods_namespace='groups', version=5.28, methods={ 'get': 'getById', 'search': 'search', 'get_members': 'getMembers', }) class Meta: verbose_name = _('Vkontakte group') verbose_name_plural = _('Vkontakte groups') def __str__(self): return self.name @property def refresh_kwargs(self): return {'ids': [self.remote_id]} @property def wall_comments(self): if 'vkontakte_wall' not in settings.INSTALLED_APPS: raise ImproperlyConfigured("Application 'vkontakte_wall' not in INSTALLED_APPS") from vkontakte_wall.models import Comment # TODO: improve schema and queries with using owner_id field return Comment.objects.filter(remote_id__startswith='-%s_' % self.remote_id) @property def topics_comments(self):<|fim▁hole|> # TODO: improve schema and queries with using owner_id field return Comment.objects.filter(remote_id__startswith='-%s_' % self.remote_id) def fetch_posts(self, *args, **kwargs): if 'vkontakte_wall' not in settings.INSTALLED_APPS: raise ImproperlyConfigured("Application 'vkontakte_wall' not in INSTALLED_APPS") from vkontakte_wall.models import Post return Post.remote.fetch_wall(owner=self, *args, **kwargs) def fetch_topics(self, *args, **kwargs): if 'vkontakte_board' not in settings.INSTALLED_APPS: raise ImproperlyConfigured("Application 'vkontakte_board' not in INSTALLED_APPS") from vkontakte_board.models import Topic return Topic.remote.fetch(group=self, *args, **kwargs) def fetch_statistic(self, *args, **kwargs): if 'vkontakte_groups_statistic' not in settings.INSTALLED_APPS: raise ImproperlyConfigured("Application 'vkontakte_groups_statistic' not in INSTALLED_APPS") from vkontakte_groups_statistic.models import fetch_statistic_for_group return fetch_statistic_for_group(group=self, *args, **kwargs) from . import signals<|fim▁end|>
if 'vkontakte_board' not in settings.INSTALLED_APPS: raise ImproperlyConfigured("Application 'vkontakte_board' not in INSTALLED_APPS") from vkontakte_board.models import Comment
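In the models.py row, get_members_ids pages through groups.getMembers with an increasing offset and, when a page comes back empty, runs check_members_count, which flags the fetch as suspicious when members_count divided by the fetched total falls outside roughly 0.99 to 1.01, retrying a few times before raising. The tolerance test is easy to invert by accident, so here is a small Rust sketch of only that check, with illustrative names:

// Accept the fetched count only if advertised/fetched stays within [0.99, 1.01],
// mirroring check_members_count's tolerance.
fn members_count_plausible(advertised: u64, fetched: u64) -> bool {
    if advertised == 0 || fetched == 0 {
        // The original only runs the check when both numbers are non-zero.
        return true;
    }
    let ratio = advertised as f64 / fetched as f64;
    (0.99..=1.01).contains(&ratio)
}

fn main() {
    assert!(members_count_plausible(10_000, 9_950));   // ratio 1.005 -> fine
    assert!(!members_count_plausible(10_000, 8_000));  // ratio 1.25  -> suspicious
    assert!(!members_count_plausible(10_000, 12_000)); // ratio ~0.83 -> suspicious
    println!("sanity check behaves as expected");
}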
<|file_name|>index.js<|end_file_name|><|fim▁begin|>module.exports = function(dataUri, maxDimension, callback){ var source = new Image(); source.addEventListener('load', function(){ var canvas = document.createElement('canvas'), ratio = Math.max(source.width, source.height) / maxDimension; canvas.width = source.width / ratio; canvas.height = source.height / ratio; var context = canvas.getContext('2d'); context.drawImage( source, 0, 0, source.width, source.height, 0, 0, canvas.width, canvas.height );<|fim▁hole|> callback(null, canvas.toDataURL()); }); source.src = dataUri; };<|fim▁end|>