prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>vacuum2.py<|end_file_name|><|fim▁begin|>import agents as ag def HW2Agent() -> object: "An agent that keeps track of what locations are clean or dirty." oldPercepts = [('None', 'Clean')] oldActions = ['NoOp'] actionScores = [{ 'Right': 0, 'Left': 0, 'Up': -1, 'Down': -1, 'NoOp': -100, }] level = 0 def program(percept): "Same as ReflexVacuumAgent, except if everything is clean, do NoOp." level = len(actionScores) - 1 bump, status = percept<|fim▁hole|> action = 'Suck' actionScores[level][lastAction] += 2 else: if bump == 'Bump': actionScores[level][lastAction] -= 10 else: if lastAction == 'Up' or lastAction == 'Down': actionScores.append({ 'Right': 0, 'Left': 0, 'Up': -1, 'Down': -1, }) highest = -80 for actionType, score in actionScores[level].items(): if score > highest: highest = score action = actionType print(actionScores) oldPercepts.append(percept) oldActions.append(action) return action return ag.Agent(program)<|fim▁end|>
lastBump, lastStatus = oldPercepts[-1] lastAction = oldActions[-1] if status == 'Dirty':
<|file_name|>OrganizationService.java<|end_file_name|><|fim▁begin|>/* * Powered By agile * Web Site: http://www.agile.com * Since 2008 - 2016 */ package persistent.prestige.modules.edu.service; import java.util.Map; /** * Organization service类 * @author 雅居乐 2016-9-10 22:28:24 * @version 1.0 */ public interface OrganizationService{ /** * 保存信息<|fim▁hole|> * @return */ Integer saveOrganization(Map datas); }<|fim▁end|>
* @param datas
<|file_name|>test_nsmetadata.py<|end_file_name|><|fim▁begin|>from Foundation import * from PyObjCTools.TestSupport import *<|fim▁hole|> try: unicode except NameError: unicode = str class TestNSMetaData (TestCase): def testConstants(self): self.assertIsInstance(NSMetadataQueryDidStartGatheringNotification, unicode) self.assertIsInstance(NSMetadataQueryGatheringProgressNotification, unicode) self.assertIsInstance(NSMetadataQueryDidFinishGatheringNotification, unicode) self.assertIsInstance(NSMetadataQueryDidUpdateNotification, unicode) self.assertIsInstance(NSMetadataQueryResultContentRelevanceAttribute, unicode) self.assertIsInstance(NSMetadataQueryUserHomeScope, unicode) self.assertIsInstance(NSMetadataQueryLocalComputerScope, unicode) self.assertIsInstance(NSMetadataQueryNetworkScope, unicode) @min_os_level('10.7') def testConstants10_7(self): self.assertIsInstance(NSMetadataQueryLocalDocumentsScope, unicode) self.assertIsInstance(NSMetadataQueryUbiquitousDocumentsScope, unicode) self.assertIsInstance(NSMetadataQueryUbiquitousDataScope, unicode) self.assertIsInstance(NSMetadataItemFSNameKey, unicode) self.assertIsInstance(NSMetadataItemDisplayNameKey, unicode) self.assertIsInstance(NSMetadataItemURLKey, unicode) self.assertIsInstance(NSMetadataItemPathKey, unicode) self.assertIsInstance(NSMetadataItemFSSizeKey, unicode) self.assertIsInstance(NSMetadataItemFSCreationDateKey, unicode) self.assertIsInstance(NSMetadataItemFSContentChangeDateKey, unicode) self.assertIsInstance(NSMetadataItemIsUbiquitousKey, unicode) self.assertIsInstance(NSMetadataUbiquitousItemHasUnresolvedConflictsKey, unicode) self.assertIsInstance(NSMetadataUbiquitousItemIsDownloadedKey, unicode) self.assertIsInstance(NSMetadataUbiquitousItemIsDownloadingKey, unicode) self.assertIsInstance(NSMetadataUbiquitousItemIsUploadedKey, unicode) self.assertIsInstance(NSMetadataUbiquitousItemIsUploadingKey, unicode) self.assertIsInstance(NSMetadataUbiquitousItemPercentDownloadedKey, unicode) 
self.assertIsInstance(NSMetadataUbiquitousItemPercentUploadedKey, unicode) def testMethods(self): self.assertResultIsBOOL(NSMetadataQuery.startQuery) self.assertResultIsBOOL(NSMetadataQuery.isStarted) self.assertResultIsBOOL(NSMetadataQuery.isGathering) self.assertResultIsBOOL(NSMetadataQuery.isStopped) if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>Utilities.cpp<|end_file_name|><|fim▁begin|>/* * Author: Kiveisha Yevgeniy * Copyright (c) 2015 Intel Corporation. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION<|fim▁hole|>* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #include "Utilities.h" #include <string.h> char Utilities::toHEX (char code) { return m_hex[code & 15]; } long Utilities::Escaping (char * src, char * dest) { char *pstr = src; char *pbuf = dest; long count = 0; while (*pstr) { if (isalnum(*pstr) || *pstr == '-' || *pstr == '_' || *pstr == '.' 
|| *pstr == '~') { *pbuf++ = *pstr; count++; } else if (*pstr == ' ') { *pbuf++ = '+'; count++; } else if (*pstr == '=') { *pbuf++ = *pstr; count++; } else { *pbuf++ = '%', *pbuf++ = toHEX(*pstr >> 4), *pbuf++ = toHEX(*pstr & 15); count += 3; } pstr++; } *pbuf = '\0'; return count; } std::string Utilities::EncodeBase64 (unsigned char const* bytes_to_encode, unsigned int in_len) { std::string ret; int i = 0; int j = 0; unsigned char char_array_3[3]; unsigned char char_array_4[4]; while (in_len--) { char_array_3[i++] = *(bytes_to_encode++); if (i == 3) { char_array_4[0] = (char_array_3[0] & 0xfc) >> 2; char_array_4[1] = ((char_array_3[0] & 0x03) << 4) + ((char_array_3[1] & 0xf0) >> 4); char_array_4[2] = ((char_array_3[1] & 0x0f) << 2) + ((char_array_3[2] & 0xc0) >> 6); char_array_4[3] = char_array_3[2] & 0x3f; for(i = 0; (i <4) ; i++) { ret += m_base64_chars[char_array_4[i]]; } i = 0; } } if (i) { for(j = i; j < 3; j++) { char_array_3[j] = '\0'; } char_array_4[0] = (char_array_3[0] & 0xfc) >> 2; char_array_4[1] = ((char_array_3[0] & 0x03) << 4) + ((char_array_3[1] & 0xf0) >> 4); char_array_4[2] = ((char_array_3[1] & 0x0f) << 2) + ((char_array_3[2] & 0xc0) >> 6); char_array_4[3] = char_array_3[2] & 0x3f; for (j = 0; (j < i + 1); j++) { ret += m_base64_chars[char_array_4[j]]; } while((i++ < 3)) ret += '='; } return ret; }<|fim▁end|>
<|file_name|>MergeablePlugin.java<|end_file_name|><|fim▁begin|>package org.jvnet.jaxb2_commons.plugin.mergeable; import java.util.Arrays; import java.util.Collection; import javax.xml.namespace.QName; import org.jvnet.jaxb2_commons.lang.JAXBMergeStrategy; import org.jvnet.jaxb2_commons.lang.MergeFrom2; import org.jvnet.jaxb2_commons.lang.MergeStrategy2; import org.jvnet.jaxb2_commons.locator.ObjectLocator; import org.jvnet.jaxb2_commons.locator.util.LocatorUtils; import org.jvnet.jaxb2_commons.plugin.AbstractParameterizablePlugin; import org.jvnet.jaxb2_commons.plugin.Customizations; import org.jvnet.jaxb2_commons.plugin.CustomizedIgnoring; import org.jvnet.jaxb2_commons.plugin.Ignoring; import org.jvnet.jaxb2_commons.plugin.util.FieldOutlineUtils; import org.jvnet.jaxb2_commons.plugin.util.StrategyClassUtils; import org.jvnet.jaxb2_commons.util.ClassUtils; import org.jvnet.jaxb2_commons.util.FieldAccessorFactory; import org.jvnet.jaxb2_commons.util.PropertyFieldAccessorFactory; import org.jvnet.jaxb2_commons.xjc.outline.FieldAccessorEx; import org.xml.sax.ErrorHandler; import com.sun.codemodel.JBlock; import com.sun.codemodel.JCodeModel; import com.sun.codemodel.JConditional; import com.sun.codemodel.JDefinedClass; import com.sun.codemodel.JExpr; import com.sun.codemodel.JExpression; import com.sun.codemodel.JMethod; import com.sun.codemodel.JMod; import com.sun.codemodel.JOp; import com.sun.codemodel.JType; import com.sun.codemodel.JVar; import com.sun.tools.xjc.Options; import com.sun.tools.xjc.outline.ClassOutline; import com.sun.tools.xjc.outline.FieldOutline; import com.sun.tools.xjc.outline.Outline; public class MergeablePlugin extends AbstractParameterizablePlugin { @Override public String getOptionName() { return "Xmergeable"; } @Override public String getUsage() { return "TBD"; } private FieldAccessorFactory fieldAccessorFactory = PropertyFieldAccessorFactory.INSTANCE; public FieldAccessorFactory getFieldAccessorFactory() { return fieldAccessorFactory; } 
public void setFieldAccessorFactory( FieldAccessorFactory fieldAccessorFactory) { this.fieldAccessorFactory = fieldAccessorFactory; } private String mergeStrategyClass = JAXBMergeStrategy.class.getName(); public void setMergeStrategyClass(final String mergeStrategyClass) { this.mergeStrategyClass = mergeStrategyClass; } public String getMergeStrategyClass() { return mergeStrategyClass; } public JExpression createMergeStrategy(JCodeModel codeModel) { return StrategyClassUtils.createStrategyInstanceExpression(codeModel, MergeStrategy2.class, getMergeStrategyClass()); } private Ignoring ignoring = new CustomizedIgnoring( org.jvnet.jaxb2_commons.plugin.mergeable.Customizations.IGNORED_ELEMENT_NAME, Customizations.IGNORED_ELEMENT_NAME, Customizations.GENERATED_ELEMENT_NAME); public Ignoring getIgnoring() { return ignoring; } public void setIgnoring(Ignoring ignoring) { this.ignoring = ignoring; } @Override public Collection<QName> getCustomizationElementNames() { return Arrays .asList(org.jvnet.jaxb2_commons.plugin.mergeable.Customizations.IGNORED_ELEMENT_NAME, Customizations.IGNORED_ELEMENT_NAME, Customizations.GENERATED_ELEMENT_NAME); } @Override public boolean run(Outline outline, Options opt, ErrorHandler errorHandler) { for (final ClassOutline classOutline : outline.getClasses()) if (!getIgnoring().isIgnored(classOutline)) { processClassOutline(classOutline); } return true; } protected void processClassOutline(ClassOutline classOutline) { final JDefinedClass theClass = classOutline.implClass; ClassUtils ._implements(theClass, theClass.owner().ref(MergeFrom2.class)); @SuppressWarnings("unused") final JMethod mergeFrom$mergeFrom0 = generateMergeFrom$mergeFrom0( classOutline, theClass); @SuppressWarnings("unused") final JMethod mergeFrom$mergeFrom = generateMergeFrom$mergeFrom( classOutline, theClass); if (!classOutline.target.isAbstract()) { @SuppressWarnings("unused") final JMethod createCopy = generateMergeFrom$createNewInstance( classOutline, theClass); } } 
protected JMethod generateMergeFrom$mergeFrom0( final ClassOutline classOutline, final JDefinedClass theClass) { JCodeModel codeModel = theClass.owner(); final JMethod mergeFrom$mergeFrom = theClass.method(JMod.PUBLIC, codeModel.VOID, "mergeFrom"); mergeFrom$mergeFrom.annotate(Override.class); { final JVar left = mergeFrom$mergeFrom.param(Object.class, "left"); final JVar right = mergeFrom$mergeFrom.param(Object.class, "right"); final JBlock body = mergeFrom$mergeFrom.body(); final JVar mergeStrategy = body.decl(JMod.FINAL, codeModel.ref(MergeStrategy2.class), "strategy", createMergeStrategy(codeModel)); body.invoke("mergeFrom").arg(JExpr._null()).arg(JExpr._null()) .arg(left).arg(right).arg(mergeStrategy); } return mergeFrom$mergeFrom; } protected JMethod generateMergeFrom$mergeFrom(ClassOutline classOutline, final JDefinedClass theClass) { final JCodeModel codeModel = theClass.owner(); final JMethod mergeFrom = theClass.method(JMod.PUBLIC, codeModel.VOID, "mergeFrom"); mergeFrom.annotate(Override.class); { final JVar leftLocator = mergeFrom.param(ObjectLocator.class, "leftLocator"); final JVar rightLocator = mergeFrom.param(ObjectLocator.class, "rightLocator"); final JVar left = mergeFrom.param(Object.class, "left"); final JVar right = mergeFrom.param(Object.class, "right"); final JVar mergeStrategy = mergeFrom.param(MergeStrategy2.class, "strategy"); final JBlock methodBody = mergeFrom.body(); Boolean superClassImplementsMergeFrom = StrategyClassUtils .superClassImplements(classOutline, getIgnoring(), MergeFrom2.class); if (superClassImplementsMergeFrom == null) { } else if (superClassImplementsMergeFrom.booleanValue()) { methodBody.invoke(JExpr._super(), "mergeFrom").arg(leftLocator) .arg(rightLocator).arg(left).arg(right) .arg(mergeStrategy); } else { } final FieldOutline[] declaredFields = FieldOutlineUtils.filter( classOutline.getDeclaredFields(), getIgnoring()); if (declaredFields.length > 0) { final JBlock body = methodBody._if(right._instanceof(theClass)) 
._then(); JVar target = body.decl(JMod.FINAL, theClass, "target", JExpr._this()); JVar leftObject = body.decl(JMod.FINAL, theClass, "leftObject", JExpr.cast(theClass, left)); JVar rightObject = body.decl(JMod.FINAL, theClass, "rightObject", JExpr.cast(theClass, right)); for (final FieldOutline fieldOutline : declaredFields) { final FieldAccessorEx leftFieldAccessor = getFieldAccessorFactory() .createFieldAccessor(fieldOutline, leftObject); final FieldAccessorEx rightFieldAccessor = getFieldAccessorFactory() .createFieldAccessor(fieldOutline, rightObject); if (leftFieldAccessor.isConstant() || rightFieldAccessor.isConstant()) { continue; } final JBlock block = body.block(); final JExpression leftFieldHasSetValue = (leftFieldAccessor .isAlwaysSet() || leftFieldAccessor.hasSetValue() == null) ? JExpr.TRUE : leftFieldAccessor.hasSetValue(); final JExpression rightFieldHasSetValue = (rightFieldAccessor .isAlwaysSet() || rightFieldAccessor.hasSetValue() == null) ? JExpr.TRUE : rightFieldAccessor.hasSetValue(); final JVar shouldBeSet = block.decl( codeModel.ref(Boolean.class), fieldOutline.getPropertyInfo().getName(false) + "ShouldBeMergedAndSet", mergeStrategy.invoke("shouldBeMergedAndSet") .arg(leftLocator).arg(rightLocator) .arg(leftFieldHasSetValue) .arg(rightFieldHasSetValue)); final JConditional ifShouldBeSetConditional = block._if(JOp .eq(shouldBeSet, codeModel.ref(Boolean.class) .staticRef("TRUE"))); final JBlock ifShouldBeSetBlock = ifShouldBeSetConditional ._then(); final JConditional ifShouldNotBeSetConditional = ifShouldBeSetConditional ._elseif(JOp.eq( shouldBeSet, codeModel.ref(Boolean.class).staticRef( "FALSE"))); final JBlock ifShouldBeUnsetBlock = ifShouldNotBeSetConditional ._then(); // final JBlock ifShouldBeIgnoredBlock = // ifShouldNotBeSetConditional // ._else(); final JVar leftField = ifShouldBeSetBlock.decl( leftFieldAccessor.getType(), "lhs" + fieldOutline.getPropertyInfo().getName( true)); leftFieldAccessor.toRawValue(ifShouldBeSetBlock, 
leftField); final JVar rightField = ifShouldBeSetBlock.decl( rightFieldAccessor.getType(), "rhs" + fieldOutline.getPropertyInfo().getName( true)); rightFieldAccessor.toRawValue(ifShouldBeSetBlock, rightField); final JExpression leftFieldLocator = codeModel .ref(LocatorUtils.class).staticInvoke("property") .arg(leftLocator) .arg(fieldOutline.getPropertyInfo().getName(false)) .arg(leftField); final JExpression rightFieldLocator = codeModel .ref(LocatorUtils.class).staticInvoke("property") .arg(rightLocator) .arg(fieldOutline.getPropertyInfo().getName(false)) .arg(rightField); final FieldAccessorEx targetFieldAccessor = getFieldAccessorFactory() .createFieldAccessor(fieldOutline, target); final JExpression mergedValue = JExpr.cast( targetFieldAccessor.getType(), mergeStrategy.invoke("merge").arg(leftFieldLocator) .arg(rightFieldLocator).arg(leftField) .arg(rightField).arg(leftFieldHasSetValue) .arg(rightFieldHasSetValue)); final JVar merged = ifShouldBeSetBlock.decl( rightFieldAccessor.getType(), "merged" + fieldOutline.getPropertyInfo().getName( true), mergedValue); targetFieldAccessor.fromRawValue( ifShouldBeSetBlock, "unique" + fieldOutline.getPropertyInfo().getName( true), merged); targetFieldAccessor.unsetValues(ifShouldBeUnsetBlock); } } } return mergeFrom; }<|fim▁hole|> final JMethod existingMethod = theClass.getMethod("createNewInstance", new JType[0]); if (existingMethod == null) { final JMethod newMethod = theClass.method(JMod.PUBLIC, theClass .owner().ref(Object.class), "createNewInstance"); newMethod.annotate(Override.class); { final JBlock body = newMethod.body(); body._return(JExpr._new(theClass)); } return newMethod; } else { return existingMethod; } } }<|fim▁end|>
protected JMethod generateMergeFrom$createNewInstance( final ClassOutline classOutline, final JDefinedClass theClass) {
<|file_name|>upgradecharm.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package main import ( "fmt" "os" "github.com/juju/cmd" "github.com/juju/names" "gopkg.in/juju/charm.v4" "launchpad.net/gnuflag" "github.com/juju/juju/cmd/envcmd" "github.com/juju/juju/environs/config" ) // UpgradeCharm is responsible for upgrading a service's charm. type UpgradeCharmCommand struct { envcmd.EnvCommandBase ServiceName string Force bool RepoPath string // defaults to JUJU_REPOSITORY SwitchURL string Revision int // defaults to -1 (latest) } const upgradeCharmDoc = ` When no flags are set, the service's charm will be upgraded to the latest revision available in the repository from which it was originally deployed. An explicit revision can be chosen with the --revision flag. If the charm came from a local repository, its path will be assumed to be $JUJU_REPOSITORY unless overridden by --repository. The local repository behaviour is tuned specifically to the workflow of a charm author working on a single client machine; use of local repositories from multiple clients is not supported and may lead to confusing behaviour. Each local charm gets uploaded with the revision specified in the charm, if possible, otherwise it gets a unique revision (highest in state + 1). The --switch flag allows you to replace the charm with an entirely different one. The new charm's URL and revision are inferred as they would be when running a deploy command. Please note that --switch is dangerous, because juju only has limited information with which to determine compatibility; the operation will succeed, regardless of potential havoc, so long as the following conditions hold: - The new charm must declare all relations that the service is currently participating in. - All config settings shared by the old and new charms must have the same types. The new charm may add new relations and configuration settings. 
--switch and --revision are mutually exclusive. To specify a given revision number with --switch, give it in the charm URL, for instance "cs:wordpress-5" would specify revision number 5 of the wordpress charm. <|fim▁hole|>Use of the --force flag is not generally recommended; units upgraded while in an error state will not have upgrade-charm hooks executed, and may cause unexpected behavior. ` func (c *UpgradeCharmCommand) Info() *cmd.Info { return &cmd.Info{ Name: "upgrade-charm", Args: "<service>", Purpose: "upgrade a service's charm", Doc: upgradeCharmDoc, } } func (c *UpgradeCharmCommand) SetFlags(f *gnuflag.FlagSet) { f.BoolVar(&c.Force, "force", false, "upgrade all units immediately, even if in error state") f.StringVar(&c.RepoPath, "repository", os.Getenv("JUJU_REPOSITORY"), "local charm repository path") f.StringVar(&c.SwitchURL, "switch", "", "crossgrade to a different charm") f.IntVar(&c.Revision, "revision", -1, "explicit revision of current charm") } func (c *UpgradeCharmCommand) Init(args []string) error { switch len(args) { case 1: if !names.IsValidService(args[0]) { return fmt.Errorf("invalid service name %q", args[0]) } c.ServiceName = args[0] case 0: return fmt.Errorf("no service specified") default: return cmd.CheckEmpty(args[1:]) } if c.SwitchURL != "" && c.Revision != -1 { return fmt.Errorf("--switch and --revision are mutually exclusive") } return nil } // Run connects to the specified environment and starts the charm // upgrade process. 
func (c *UpgradeCharmCommand) Run(ctx *cmd.Context) error { client, err := c.NewAPIClient() if err != nil { return err } defer client.Close() oldURL, err := client.ServiceGetCharmURL(c.ServiceName) if err != nil { return err } attrs, err := client.EnvironmentGet() if err != nil { return err } conf, err := config.New(config.NoDefaults, attrs) if err != nil { return err } var newURL *charm.URL if c.SwitchURL != "" { newURL, err = resolveCharmURL(c.SwitchURL, client, conf) if err != nil { return err } } else { // No new URL specified, but revision might have been. newURL = oldURL.WithRevision(c.Revision) } repo, err := charm.InferRepository(newURL.Reference(), ctx.AbsPath(c.RepoPath)) if err != nil { return err } repo = config.SpecializeCharmRepo(repo, conf) // If no explicit revision was set with either SwitchURL // or Revision flags, discover the latest. explicitRevision := true if newURL.Revision == -1 { explicitRevision = false latest, err := charm.Latest(repo, newURL) if err != nil { return err } newURL = newURL.WithRevision(latest) } if *newURL == *oldURL { if explicitRevision { return fmt.Errorf("already running specified charm %q", newURL) } else if newURL.Schema == "cs" { // No point in trying to upgrade a charm store charm when // we just determined that's the latest revision // available. return fmt.Errorf("already running latest charm %q", newURL) } } addedURL, err := addCharmViaAPI(client, ctx, newURL, repo) if err != nil { return err } return client.ServiceSetCharm(c.ServiceName, addedURL.String(), c.Force) }<|fim▁end|>
<|file_name|>CertificationRequestInfo.d.ts<|end_file_name|><|fim▁begin|>declare namespace jsrsasign.KJUR.asn1.csr { /** * ASN.1 CertificationRequestInfo structure class * @param params associative array of parameters (ex. {}) * @description * ``` * // -- DEFINITION OF ASN.1 SYNTAX -- * // CertificationRequestInfo ::= SEQUENCE { * // version INTEGER { v1(0) } (v1,...), * // subject Name, * // subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }}, * // attributes [0] Attributes{{ CRIAttributes }} } * ``` * * @example * csri = new KJUR.asn1.csr.CertificationRequestInfo(); * csri.setSubjectByParam({'str': '/C=US/O=Test/CN=example.com'}); * csri.setSubjectPublicKeyByGetKey(pubKeyObj); */ class CertificationRequestInfo extends ASN1Object { constructor(); _initialize(): void; /** * set subject name field by parameter * @param x500NameParam X500Name parameter * @description * @example * csri.setSubjectByParam({'str': '/C=US/CN=b'}); * @see KJUR.asn1.x509.X500Name */ setSubjectByParam(x500NameParam: StringParam): void; /** * set subject public key info by RSA/ECDSA/DSA key parameter * @param keyParam public key parameter which passed to `KEYUTIL.getKey` argument * @example * csri.setSubjectPublicKeyByGetKeyParam(certPEMString); // or * csri.setSubjectPublicKeyByGetKeyParam(pkcs8PublicKeyPEMString); // or * csir.setSubjectPublicKeyByGetKeyParam(kjurCryptoECDSAKeyObject); // et.al. * @see KJUR.asn1.x509.SubjectPublicKeyInfo * @see KEYUTIL.getKey */ setSubjectPublicKeyByGetKey( keyParam: RSAKey | crypto.ECDSA | crypto.DSA | jws.JWS.JsonWebKey | { n: string; e: string } | string, ): void; /** * append X.509v3 extension to this object by name and parameters * @param name name of X.509v3 Extension object * @param extParams parameters as argument of Extension constructor. 
* @see KJUR.asn1.x509.Extension * @example * var o = new KJUR.asn1.csr.CertificationRequestInfo(); * o.appendExtensionByName('BasicConstraints', {'cA':true, 'critical': true}); * o.appendExtensionByName('KeyUsage', {'bin':'11'}); * o.appendExtensionByName('CRLDistributionPoints', {uri: 'http://aaa.com/a.crl'}); * o.appendExtensionByName('ExtKeyUsage', {array: [{name: 'clientAuth'}]}); * o.appendExtensionByName('AuthorityKeyIdentifier', {kid: '1234ab..'}); * o.appendExtensionByName('AuthorityInfoAccess', {array: [{accessMethod:{oid:...},accessLocation:{uri:...}}]});<|fim▁hole|> appendExtensionByName( name: string, extParams: | { ca: boolean; critical: boolean } | BinParam | x509.UriParam | ArrayParam<{ name: string }> | { kid: string } | ArrayParam<{ accessMethod: { oid: string }; accessLocation: x509.UriParam }>, ): void; getEncodedHex(): string; } }<|fim▁end|>
*/
<|file_name|>DistributedCartesianGridAI.py<|end_file_name|><|fim▁begin|>from pandac.PandaModules import * from direct.directnotify.DirectNotifyGlobal import directNotify from direct.task import Task from .DistributedNodeAI import DistributedNodeAI from .CartesianGridBase import CartesianGridBase class DistributedCartesianGridAI(DistributedNodeAI, CartesianGridBase): notify = directNotify.newCategory("DistributedCartesianGridAI") RuleSeparator = ":" def __init__(self, air, startingZone, gridSize, gridRadius, cellWidth, style="Cartesian"): DistributedNodeAI.__init__(self, air)<|fim▁hole|> self.gridRadius = gridRadius self.cellWidth = cellWidth # Keep track of all AI objects added to the grid self.gridObjects = {} self.updateTaskStarted = 0 def delete(self): DistributedNodeAI.delete(self) self.stopUpdateGridTask() def isGridParent(self): # If this distributed object is a DistributedGrid return 1. # 0 by default return 1 def getCellWidth(self): return self.cellWidth def getParentingRules(self): self.notify.debug("calling getter") rule = ("%i%s%i%s%i" % (self.startingZone, self.RuleSeparator, self.gridSize, self.RuleSeparator, self.gridRadius)) return [self.style, rule] # Reparent and setLocation on av to DistributedOceanGrid def addObjectToGrid(self, av, useZoneId=-1, startAutoUpdate=True): self.notify.debug("setting parent to grid %s" % self) avId = av.doId # Create a grid parent #gridParent = self.attachNewNode("gridParent-%s" % avId) #self.gridParents[avId] = gridParent self.gridObjects[avId] = av # Put the avatar on the grid self.handleAvatarZoneChange(av, useZoneId) if (not self.updateTaskStarted) and startAutoUpdate: self.startUpdateGridTask() def removeObjectFromGrid(self, av): # TODO: WHAT LOCATION SHOULD WE SET THIS TO? 
#av.wrtReparentTo(self.parentNP) #av.setLocation(self.air.districtId, 1000) # Remove grid parent for this av avId = av.doId if avId in self.gridObjects: del self.gridObjects[avId] # Stop task if there are no more av's being managed if len(self.gridObjects) == 0: self.stopUpdateGridTask() ##################################################################### # updateGridTask # This task is similar to the processVisibility task for the local client. # A couple differences: # - we are not doing setInterest on the AI (that is a local client # specific call). # - we assume that the moving objects on the grid are parented to a # gridParent, and are broadcasting their position relative to that # gridParent. This makes the task's math easy. Just check to see # when our position goes out of the current grid cell. When it does, # call handleAvatarZoneChange def startUpdateGridTask(self): self.stopUpdateGridTask() self.updateTaskStarted = 1 taskMgr.add(self.updateGridTask, self.taskName("updateGridTask")) def stopUpdateGridTask(self): taskMgr.remove(self.taskName("updateGridTask")) self.updateTaskStarted = 0 def updateGridTask(self, task=None): # Run through all grid objects and update their parents if needed missingObjs = [] for avId in self.gridObjects.keys(): av = self.gridObjects[avId] # handle a missing object after it is already gone? 
if (av.isEmpty()): task.setDelay(1.0) del self.gridObjects[avId] continue pos = av.getPos() if ((pos[0] < 0 or pos[1] < 0) or (pos[0] > self.cellWidth or pos[1] > self.cellWidth)): # we are out of the bounds of this current cell self.handleAvatarZoneChange(av) # Do this every second, not every frame if (task): task.setDelay(1.0) return Task.again def handleAvatarZoneChange(self, av, useZoneId=-1): # Calculate zone id # Get position of av relative to this grid if (useZoneId == -1): pos = av.getPos(self) zoneId = self.getZoneFromXYZ(pos) else: # zone already calculated, position of object might not # give the correct zone pos = None zoneId = useZoneId if not self.isValidZone(zoneId): self.notify.warning( "%s handleAvatarZoneChange %s: not a valid zone (%s) for pos %s" %(self.doId, av.doId, zoneId, pos)) return # Set the location on the server. # setLocation will update the gridParent av.b_setLocation(self.doId, zoneId) def handleSetLocation(self, av, parentId, zoneId): pass #if (av.parentId != parentId): # parent changed, need to look up instance tree # to see if avatar's named area location information # changed #av.requestRegionUpdateTask(regionegionUid)<|fim▁end|>
self.style = style self.startingZone = startingZone self.gridSize = gridSize
<|file_name|>struct_warnings.go<|end_file_name|><|fim▁begin|>package aegis //Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|>//You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. // Warnings is a nested struct in aegis response type Warnings struct { Warning []Warning `json:"Warning" xml:"Warning"` }<|fim▁end|>
//you may not use this file except in compliance with the License.
<|file_name|>save-action-service.js<|end_file_name|><|fim▁begin|>(function() { 'use strict'; angular .module('otusjs.player.core.phase') .service('otusjs.player.core.phase.SaveActionService', Service); Service.$inject = [ 'otusjs.player.core.phase.ActionPipeService', 'otusjs.player.core.phase.PreSaveActionService', 'otusjs.player.core.phase.ExecutionSaveActionService', 'otusjs.player.core.phase.PostSaveActionService' ]; <|fim▁hole|> /* Public methods */ self.PreSaveActionService = PreSaveActionService; self.ExecutionSaveActionService = ExecutionSaveActionService; self.PostSaveActionService = PostSaveActionService; self.execute = execute; function execute() { var phaseData = PreSaveActionService.execute(ActionPipeService.flowData); phaseData = ExecutionSaveActionService.execute(phaseData); phaseData = PostSaveActionService.execute(phaseData); } } })();<|fim▁end|>
function Service(ActionPipeService, PreSaveActionService, ExecutionSaveActionService, PostSaveActionService) { var self = this;
<|file_name|>MostSearchData.java<|end_file_name|><|fim▁begin|>package adamin90.com.wpp.model.mostsearch; import java.util.ArrayList; import java.util.List; import javax.annotation.Generated; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; @Generated("org.jsonschema2pojo") public class MostSearchData { @SerializedName("data") @Expose private List<Datum> data = new ArrayList<Datum>(); @SerializedName("code") @Expose private Integer code; /** * * @return * The data */ public List<Datum> getData() { return data; } /** * * @param data * The data */ public void setData(List<Datum> data) { this.data = data; } /** * * @return * The code */ public Integer getCode() { return code; } /** * * @param code * The code */ public void setCode(Integer code) { this.code = code;<|fim▁hole|><|fim▁end|>
} }
<|file_name|>common.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- APPNAME = 'desktop-mirror'<|fim▁hole|><|fim▁end|>
DEFAULT_PORT = 47767 VERSION = 'v0.8-7-g2038d52'
<|file_name|>map.ts<|end_file_name|><|fim▁begin|>import $ from 'jquery'; import Vue from "vue"; import { BootstrapVue } from "bootstrap-vue"; import { FacilMap } from "../lib"; import "./bootstrap.scss"; import "bootstrap-vue/dist/bootstrap-vue.css"; import withRender from "./map.vue"; import "./map.scss"; import { decodeQueryString, encodeQueryString } from "facilmap-utils"; import decodeURIComponent from "decode-uri-component"; Vue.use(BootstrapVue, { BDropdown: { popperOpts: { positionFixed: true, /* modifiers: { preventOverflow: { enabled: false }, hide: { enabled: false } } */ }, boundary: "window", noFlip: true<|fim▁hole|> BTooltip: { popperOpts: { positionFixed: true }, boundary: "window" } }); // Dereferrer $(document).on("click", "a", function() { const el = $(this); const href = el.attr("href"); if(href && href.match(/^\s*(https?:)?\/\//i)) { el.attr("href", "deref.html?"+encodeURIComponent(href)); setTimeout(function() { el.attr("href", href); }, 0); } }); if ('serviceWorker' in navigator) navigator.serviceWorker.register('./sw.js'); const queryParams = decodeQueryString(location.search); const toBoolean = (val: string, def: boolean) => (val == null ? def : val != "0" && val != "false" && val != "no"); const baseUrl = location.protocol + "//" + location.host + location.pathname.replace(/[^/]*$/, ""); const initialPadId = decodeURIComponent(location.pathname.match(/[^/]*$/)![0]); if(!location.hash || location.hash == "#") { const moveKeys = Object.keys(queryParams).filter((key) => ([ "zoom", "lat", "lon", "layer", "l", "q", "s", "c" ].includes(key))); if(moveKeys.length > 0) { const hashParams: Record<string, string> = { }; for (const key of moveKeys) { hashParams[key] = queryParams[key]; delete queryParams[key]; } const query = encodeQueryString(queryParams); const hash = encodeQueryString(hashParams); history.replaceState(null, "", baseUrl + encodeURIComponent(initialPadId || "") + (query ? "?" 
+ query : "") + "#" + hash); } } new Vue(withRender({ el: "#loading", data: { padId: initialPadId, padName: undefined, baseUrl, toolbox: toBoolean(queryParams.toolbox, true), search: toBoolean(queryParams.search, true), autofocus: toBoolean(queryParams.autofocus, parent === window), legend: toBoolean(queryParams.legend, true), interactive: toBoolean(queryParams.interactive, parent === window), linkLogo: parent !== window }, watch: { padId: (padId: string | undefined) => { history.replaceState(null, "", baseUrl + (padId ? encodeURIComponent(padId) : "") + location.search + location.hash); }, padName: (padName: string | undefined) => { const title = padName ? padName + ' – FacilMap' : 'FacilMap'; // We have to call history.replaceState() in order for the new title to end up in the browser history window.history && history.replaceState({ }, title); document.title = title; } }, components: { FacilMap } }));<|fim▁end|>
},
<|file_name|>benchprint.py<|end_file_name|><|fim▁begin|>#===================================================== # Copyright (C) 2011 Andrea Arteaga <[email protected]> #===================================================== # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # import benchconfig as cfg from utils import benchutils as bu from os.path import dirname, join as pjoin class _Print: def __init__(self, logfile, maxlevel=10): self._level = 0 self._maxlevel = maxlevel self._logfile = logfile def __call__(self, arg='', end='\n'): printstr = str(arg) + end if self._level > 0: printstr = (self._level - 1) * " " + "-- " + printstr # Print to logfile bu.mkdir(dirname(self._logfile)) logfile = file(self._logfile, 'a') print >> logfile, printstr, logfile.close() # Print to terminal if self._level <= self._maxlevel: print printstr, def up(self, n=1): self._level = max(self._level - n, 0) def down(self, n=1): self._level = max(self._level + n, 0) # Uninitialized object (wait for argument parsing, directories lookup,... ) Print = None <|fim▁hole|>def initializePrint(): global Print Print = _Print(pjoin(cfg.logdir, 'main.log'), 3) return Print<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>export * from '../common'; export NodeBundle from './NodeBundle';<|fim▁hole|><|fim▁end|>
export CommonJsResolver from './CommonJsResolver';
<|file_name|>recipes-8.py<|end_file_name|><|fim▁begin|>np.random.seed(1234) fig, ax = plt.subplots(1) x = 30*np.random.randn(10000) mu = x.mean() median = np.median(x) sigma = x.std() textstr = '$\mu=%.2f$\n$\mathrm{median}=%.2f$\n$\sigma=%.2f$'%(mu, median, sigma) ax.hist(x, 50) # these are matplotlib.patch.Patch properties<|fim▁hole|> verticalalignment='top', bbox=props)<|fim▁end|>
props = dict(boxstyle='round', facecolor='wheat', alpha=0.5) # place a text box in upper left in axes coords ax.text(0.05, 0.95, textstr, transform=ax.transAxes, fontsize=14,
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>import datetime from django.conf import settings from django.db import DEFAULT_DB_ALIAS from django.test import TestCase, skipIfDBFeature from django.utils import tzinfo from models import Donut, RumBaba class DataTypesTestCase(TestCase): def test_boolean_type(self): d = Donut(name='Apple Fritter') self.assertFalse(d.is_frosted) self.assertTrue(d.has_sprinkles is None) d.has_sprinkles = True self.assertTrue(d.has_sprinkles) d.save() d2 = Donut.objects.get(name='Apple Fritter') self.assertFalse(d2.is_frosted) self.assertTrue(d2.has_sprinkles) def test_date_type(self): d = Donut(name='Apple Fritter') d.baked_date = datetime.date(year=1938, month=6, day=4) d.baked_time = datetime.time(hour=5, minute=30) d.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59) d.save() d2 = Donut.objects.get(name='Apple Fritter') self.assertEqual(d2.baked_date, datetime.date(1938, 6, 4)) self.assertEqual(d2.baked_time, datetime.time(5, 30)) self.assertEqual(d2.consumed_at, datetime.datetime(2007, 4, 20, 16, 19, 59)) def test_time_field(self): #Test for ticket #12059: TimeField wrongly handling datetime.datetime object. 
d = Donut(name='Apple Fritter') d.baked_time = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59) d.save() d2 = Donut.objects.get(name='Apple Fritter') self.assertEqual(d2.baked_time, datetime.time(16, 19, 59)) def test_year_boundaries(self): """Year boundary tests (ticket #3689)""" d = Donut.objects.create(name='Date Test 2007', baked_date=datetime.datetime(year=2007, month=12, day=31), consumed_at=datetime.datetime(year=2007, month=12, day=31, hour=23, minute=59, second=59)) d1 = Donut.objects.create(name='Date Test 2006', baked_date=datetime.datetime(year=2006, month=1, day=1), consumed_at=datetime.datetime(year=2006, month=1, day=1)) self.assertEqual("Date Test 2007", Donut.objects.filter(baked_date__year=2007)[0].name) self.assertEqual("Date Test 2006", Donut.objects.filter(baked_date__year=2006)[0].name) d2 = Donut.objects.create(name='Apple Fritter', consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)) self.assertEqual([u'Apple Fritter', u'Date Test 2007'], list(Donut.objects.filter(consumed_at__year=2007).order_by('name').values_list('name', flat=True))) self.assertEqual(0, Donut.objects.filter(consumed_at__year=2005).count()) self.assertEqual(0, Donut.objects.filter(consumed_at__year=2008).count()) def test_textfields_unicode(self): """Regression test for #10238: TextField values returned from the database should be unicode.""" d = Donut.objects.create(name=u'Jelly Donut', review=u'Outstanding') newd = Donut.objects.get(id=d.id) self.assert_(isinstance(newd.review, unicode)) @skipIfDBFeature('supports_timezones') def test_error_on_timezone(self): """Regression test for #8354: the MySQL and Oracle backends should raise an error if given a timezone-aware datetime object.""" dt = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=tzinfo.FixedOffset(0)) d = Donut(name='Bear claw', consumed_at=dt)<|fim▁hole|> def test_datefield_auto_now_add(self): """Regression test for #10970, auto_now_add for 
DateField should store a Python datetime.date, not a datetime.datetime""" b = RumBaba.objects.create() # Verify we didn't break DateTimeField behavior self.assert_(isinstance(b.baked_timestamp, datetime.datetime)) # We need to test this this way because datetime.datetime inherits # from datetime.date: self.assert_(isinstance(b.baked_date, datetime.date) and not isinstance(b.baked_date, datetime.datetime))<|fim▁end|>
self.assertRaises(ValueError, d.save) # ValueError: MySQL backend does not support timezone-aware datetimes.
<|file_name|>TestLogSpecification.cpp<|end_file_name|><|fim▁begin|>//////////////////////////////////////////////////////////////////////////////// /// DISCLAIMER /// /// Copyright 2020-2021 ArangoDB GmbH, Cologne, Germany /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>/// limitations under the License. /// /// Copyright holder is ArangoDB GmbH, Cologne, Germany /// /// @author Lars Maier //////////////////////////////////////////////////////////////////////////////// #include "TestLogSpecification.h" #include "Replication2/Streams/LogMultiplexer.tpp" template struct arangodb::replication2::streams::LogMultiplexer<arangodb::replication2::test::MyTestSpecification>; template struct arangodb::replication2::streams::LogDemultiplexer<arangodb::replication2::test::MyTestSpecification>;<|fim▁end|>
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|><|fim▁end|>
var path = require('path'); module.exports = path.join.bind(path, __dirname, '..');
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""Defines all the classes needed to create a Report from scratch. Report: a single document about the state of a cafe. Category: a class of products sharing common characteristics. Product: a single item in a cafe. Unit: a measure of products. FullProduct: a product with its quantity. """ from django.core.exceptions import ValidationError from django.core.validators import MinValueValidator from django.db import models from django.utils.translation import ugettext_lazy as _ class Report(models.Model): """Stores a single report created from selected FullProducts. Date of creation is set automatically. Currently logged in user is assigned to report as creator. """ created_on = models.DateTimeField(auto_now_add=True) updated_on = models.DateTimeField(auto_now=True) creator = models.ForeignKey( 'employees.Employee', null=True, blank=True, default=None ) caffe = models.ForeignKey( 'caffe.Caffe',<|fim▁hole|> ) class Meta: ordering = ('-created_on',) default_permissions = ('add', 'change', 'delete', 'view') def save(self, *args, **kwargs): """Save model into the database.""" if self.creator is not None: if self.caffe != self.creator.caffe: raise ValidationError( _('Kawiarnia i kawiarnia tworzącego powinna się zgadzać') ) self.full_clean() super(Report, self).save(*args, **kwargs) def __str__(self): return 'Report created: {:%Y-%m-%d %H:%M} {}'.format( self.created_on, self.creator ) class Category(models.Model): """Stores the category of a product, e.g. cake, tea, sandwich. Intended to be created once and then to reuse it in future reports. 
""" name = models.CharField(max_length=100) caffe = models.ForeignKey( 'caffe.Caffe', null=True, blank=False, default=None ) class Meta: ordering = ('name',) unique_together = ('name', 'caffe',) default_permissions = ('add', 'change', 'delete', 'view') def save(self, *args, **kwargs): """Save model into the database.""" self.full_clean() super(Category, self).save(*args, **kwargs) def __str__(self): return '{}'.format(self.name) class Unit(models.Model): """Stores a type of unit used to count the amount of products. Intended to be created once and then to reuse it in future reports. """ name = models.CharField(max_length=100) caffe = models.ForeignKey( 'caffe.Caffe', null=True, blank=False, default=None ) class Meta: ordering = ('name',) unique_together = ('name', 'caffe',) default_permissions = ('add', 'change', 'delete', 'view') def save(self, *args, **kwargs): """Save model into the database.""" self.full_clean() super(Unit, self).save(*args, **kwargs) def __str__(self): return '{}'.format(self.name) class Product(models.Model): """Stores a specific product, e.g. brownie, earl grey, PB&J sandwich. Intended to be created once and then to reuse it in future reports. Unit specifies how the amount of product is counted. 
""" name = models.CharField(max_length=100) category = models.ForeignKey('Category', on_delete=models.CASCADE) unit = models.ForeignKey('Unit', on_delete=models.CASCADE) caffe = models.ForeignKey( 'caffe.Caffe', null=True, blank=False, default=None ) class Meta: ordering = ('name',) unique_together = ('name', 'caffe',) default_permissions = ('add', 'change', 'delete', 'view') def save(self, *args, **kwargs): """Save model into the database.""" if self.caffe != self.category.caffe: raise ValidationError( _('Kawiarnia i kawiarnia kategorii nie zgadza się.') ) if self.caffe != self.unit.caffe: raise ValidationError( _('Kawiarnia i kawiarnia jednostki nie zgadza się.') ) self.full_clean() super(Product, self).save(*args, **kwargs) def __str__(self): return '{}'.format(self.name) class FullProduct(models.Model): """Stores a product with its quantity. Intended to be used once, only in one report. """ product = models.ForeignKey('Product') amount = models.FloatField(validators=[MinValueValidator(0)]) report = models.ForeignKey( 'Report', blank=True, null=True, related_name='full_products' ) caffe = models.ForeignKey( 'caffe.Caffe', null=True, blank=False, default=None ) def clean(self, *args, **kwargs): """Clean data and check validation.""" # checks if there exists two same products full_products = [] if self.report is not None: full_products = self.report.full_products.all() for full_product in full_products: if full_product.product == self.product: raise ValidationError( _('Report should not contain two same products.') ) super(FullProduct, self).clean(*args, **kwargs) def save(self, *args, **kwargs): """Save model into the database.""" if self.report: if self.caffe != self.report.caffe: raise ValidationError( _('Kawiarnia i kawiarnia raportu nie zgadza się.') ) if self.caffe != self.product.caffe: raise ValidationError( _('Kawiarnia i kawiarnia produktu nie zgadza się.') ) self.full_clean() super(FullProduct, self).save(*args, **kwargs) def __str__(self): return '{0}, 
{1:g} {2}'.format( self.product, self.amount, self.product.unit )<|fim▁end|>
null=True, blank=False, default=None
<|file_name|>import8.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use foo::x;<|fim▁hole|>use foo::x as z; mod foo { pub fn x(y: int) { println!("{}", y); } } pub fn main() { x(10); z(10); }<|fim▁end|>
<|file_name|>GlobalTask.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2010-2101 Alibaba Group Holding Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.otter.node.etl.common.task; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import org.I0Itec.zkclient.exception.ZkInterruptedException; import org.apache.commons.lang.ClassUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.commons.lang.math.RandomUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.alibaba.otter.node.common.config.ConfigClientService; import com.alibaba.otter.node.etl.common.jmx.StageAggregationCollector; import com.alibaba.otter.node.etl.common.pipe.impl.RowDataPipeDelegate; import com.alibaba.otter.shared.arbitrate.ArbitrateEventService; import com.alibaba.otter.shared.arbitrate.model.TerminEventData; import com.alibaba.otter.shared.arbitrate.model.TerminEventData.TerminType; import com.alibaba.otter.shared.common.model.config.pipeline.Pipeline; <|fim▁hole|> * mainstem,select,extract,transform,load parent Thread. 
* * @author xiaoqing.zhouxq 2011-8-23 上午10:38:14 */ public abstract class GlobalTask extends Thread { protected final Logger logger = LoggerFactory.getLogger(this.getClass()); protected volatile boolean running = true; protected Pipeline pipeline; protected Long pipelineId; protected ArbitrateEventService arbitrateEventService; protected RowDataPipeDelegate rowDataPipeDelegate; protected ExecutorService executorService; protected ConfigClientService configClientService; protected StageAggregationCollector stageAggregationCollector; protected Map<Long, Future> pendingFuture; public GlobalTask(Pipeline pipeline){ this(pipeline.getId()); this.pipeline = pipeline; } public GlobalTask(Long pipelineId){ this.pipelineId = pipelineId; setName(createTaskName(pipelineId, ClassUtils.getShortClassName(this.getClass()))); pendingFuture = new HashMap<Long, Future>(); } public void shutdown() { running = false; interrupt(); List<Future> cancelFutures = new ArrayList<Future>(); for (Map.Entry<Long, Future> entry : pendingFuture.entrySet()) { if (!entry.getValue().isDone()) { logger.warn("WARN ## Task future processId[{}] canceled!", entry.getKey()); cancelFutures.add(entry.getValue()); } } for (Future future : cancelFutures) { future.cancel(true); } pendingFuture.clear(); } protected void sendRollbackTermin(long pipelineId, Throwable exception) { sendRollbackTermin(pipelineId, ExceptionUtils.getFullStackTrace(exception)); } protected void sendRollbackTermin(long pipelineId, String message) { TerminEventData errorEventData = new TerminEventData(); errorEventData.setPipelineId(pipelineId); errorEventData.setType(TerminType.ROLLBACK); errorEventData.setCode("setl"); errorEventData.setDesc(message); arbitrateEventService.terminEvent().single(errorEventData); // 每次发送完报警后,sleep一段时间,继续做后面的事 try { Thread.sleep(3000 + RandomUtils.nextInt(3000)); } catch (InterruptedException e) { } } /** * 自动处理数据为null的情况,重新发一遍数据 */ protected void processMissData(long pipelineId, String message) { 
TerminEventData errorEventData = new TerminEventData(); errorEventData.setPipelineId(pipelineId); errorEventData.setType(TerminType.RESTART); errorEventData.setCode("setl"); errorEventData.setDesc(message); arbitrateEventService.terminEvent().single(errorEventData); } protected String createTaskName(long pipelineId, String taskName) { return new StringBuilder().append("pipelineId = ").append(pipelineId).append(",taskName = ").append(taskName).toString(); } protected boolean isProfiling() { return stageAggregationCollector.isProfiling(); } protected boolean isInterrupt(Throwable e) { if (!running) { return true; } if (e instanceof InterruptedException || e instanceof ZkInterruptedException) { return true; } if (ExceptionUtils.getRootCause(e) instanceof InterruptedException) { return true; } return false; } public Collection<Long> getPendingProcess() { List<Long> result = new ArrayList<Long>(pendingFuture.keySet()); Collections.sort(result); return result; } // ====================== setter / getter ========================= public void setArbitrateEventService(ArbitrateEventService arbitrateEventService) { this.arbitrateEventService = arbitrateEventService; } public void setRowDataPipeDelegate(RowDataPipeDelegate rowDataPipeDelegate) { this.rowDataPipeDelegate = rowDataPipeDelegate; } public void setExecutorService(ExecutorService executorService) { this.executorService = executorService; } public void setConfigClientService(ConfigClientService configClientService) { this.configClientService = configClientService; } public void setStageAggregationCollector(StageAggregationCollector stageAggregationCollector) { this.stageAggregationCollector = stageAggregationCollector; } }<|fim▁end|>
/**
<|file_name|>choose_fastest_branch_dataset_serialization_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for the ChooseFastestBranchDataset serialization.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.data.experimental.kernel_tests.serialization import dataset_serialization_test_base from tensorflow.python.data.experimental.ops import batching from tensorflow.python.data.experimental.ops import optimization from tensorflow.python.data.ops import dataset_ops from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.ops import math_ops from tensorflow.python.platform import test class ChooseFastestBranchDatasetSerializationTest( dataset_serialization_test_base.DatasetSerializationTestBase): def testCore(self): def build_ds(size): dataset = dataset_ops.Dataset.range(size) def branch_0(dataset): return dataset.map(lambda x: x).batch(10)<|fim▁hole|> return optimization._ChooseFastestBranchDataset( # pylint: disable=protected-access dataset, [branch_0, branch_1], ratio_numerator=10) for size in [100, 1000]: self.run_core_tests(lambda: build_ds(size), None, size // 10) # pylint: disable=cell-var-from-loop def testWithCapture(self): def build_ds(): 
dataset = dataset_ops.Dataset.range(10) const_64 = constant_op.constant(1, dtypes.int64) const_32 = constant_op.constant(1, dtypes.int32) def branch_0(dataset): return dataset.map(lambda x: x + const_64) def branch_1(dataset): return dataset.map(lambda x: x + math_ops.cast(const_32, dtypes.int64)) return optimization._ChooseFastestBranchDataset( dataset, [branch_0, branch_1], num_elements_per_branch=3) self.run_core_tests(build_ds, None, 10) def testWithPrefetch(self): def build_ds(): dataset = dataset_ops.Dataset.range(10) const_64 = constant_op.constant(1, dtypes.int64) const_32 = constant_op.constant(1, dtypes.int32) def branch_0(dataset): return dataset.map(lambda x: x + const_64) def branch_1(dataset): return dataset.map(lambda x: x + math_ops.cast(const_32, dtypes.int64)) return optimization._ChooseFastestBranchDataset( dataset, [branch_0, branch_1], num_elements_per_branch=3) self.run_core_tests(build_ds, None, 10) def testWithMoreOutputThanInput(self): def build_ds(): dataset = dataset_ops.Dataset.from_tensors(0).repeat(1000).batch(100) def branch(dataset): return dataset.apply(batching.unbatch()) return optimization._ChooseFastestBranchDataset( dataset, [branch, branch], ratio_denominator=10, num_elements_per_branch=100) self.run_core_tests(build_ds, None, 1000) if __name__ == "__main__": test.main()<|fim▁end|>
def branch_1(dataset): return dataset.batch(10).map(lambda x: x)
<|file_name|>status.py<|end_file_name|><|fim▁begin|>import json as json_ # Template for code 200 requests so data can easily be added def ok(d=None, *, json=True): code = {'code': 200, 'status': 'OK', 'data': d} if json: code = json_.dumps(code) return code # The 400 codes shouldn't require any special aruments. def invalid_request(*, json=True): code = {'code': 400, 'status': 'MALFORMED_REQUEST'} if json: code = json_.dumps(code) return code def unknown_request(*, json=True): code = {'code': 400, 'status': 'UNKNOWN_REQUEST'}<|fim▁hole|> # You can assign the internal server error a number for debugging purposes. def internal_server_error(n=None, *, json=True): status_string = 'INTERNAL_SERVER_ERROR' if n is not None: status_string += '_{}'.format(n) code = {'code': 500, 'status': status_string} if json: code = json_.dumps(code) return code<|fim▁end|>
if json: code = json_.dumps(code) return code
<|file_name|>event.js<|end_file_name|><|fim▁begin|>/* ***** BEGIN LICENSE BLOCK ***** * Distributed under the BSD license: * * Copyright (c) 2010, Ajax.org B.V. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Ajax.org B.V. nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * ***** END LICENSE BLOCK ***** */ define(function(require, exports, module) { "use strict"; var keys = require("./keys"); var useragent = require("./useragent"); exports.addListener = function(elem, type, callback) { if (elem.addEventListener) { return elem.addEventListener(type, callback, false); } if (elem.attachEvent) { var wrapper = function() { callback.call(elem, window.event); }; callback._wrapper = wrapper; elem.attachEvent("on" + type, wrapper); } }; exports.removeListener = function(elem, type, callback) { if (elem.removeEventListener) { return elem.removeEventListener(type, callback, false); } if (elem.detachEvent) { elem.detachEvent("on" + type, callback._wrapper || callback); } }; /* * Prevents propagation and clobbers the default action of the passed event */ exports.stopEvent = function(e) { exports.stopPropagation(e); exports.preventDefault(e); return false; }; exports.stopPropagation = function(e) { if (e.stopPropagation) e.stopPropagation(); else e.cancelBubble = true; }; exports.preventDefault = function(e) { if (e.preventDefault) e.preventDefault(); else e.returnValue = false; }; /* * @return {Number} 0 for left button, 1 for middle button, 2 for right button */ exports.getButton = function(e) { if (e.type == "dblclick") return 0; if (e.type == "contextmenu" || (useragent.isMac && (e.ctrlKey && !e.altKey && !e.shiftKey))) return 2; // DOM Event if (e.preventDefault) { return e.button; } // old IE else { return {1:0, 2:2, 4:1}[e.button]; } }; exports.capture = function(el, eventHandler, releaseCaptureHandler) { function onMouseUp(e) { eventHandler && eventHandler(e); releaseCaptureHandler && releaseCaptureHandler(e); exports.removeListener(document, "mousemove", eventHandler, true); exports.removeListener(document, "mouseup", onMouseUp, true); exports.removeListener(document, "dragstart", onMouseUp, true); } exports.addListener(document, "mousemove", eventHandler, true); exports.addListener(document, "mouseup", onMouseUp, true); 
exports.addListener(document, "dragstart", onMouseUp, true); return onMouseUp; }; exports.addMouseWheelListener = function(el, callback) { if ("onmousewheel" in el) { exports.addListener(el, "mousewheel", function(e) { var factor = 8; if (e.wheelDeltaX !== undefined) { e.wheelX = -e.wheelDeltaX / factor; e.wheelY = -e.wheelDeltaY / factor; } else { e.wheelX = 0; e.wheelY = -e.wheelDelta / factor; } callback(e); }); } else if ("onwheel" in el) { exports.addListener(el, "wheel", function(e) { var factor = 0.35; switch (e.deltaMode) { case e.DOM_DELTA_PIXEL: e.wheelX = e.deltaX * factor || 0; e.wheelY = e.deltaY * factor || 0; break; case e.DOM_DELTA_LINE: case e.DOM_DELTA_PAGE: e.wheelX = (e.deltaX || 0) * 5; e.wheelY = (e.deltaY || 0) * 5; break; } callback(e); }); } else { exports.addListener(el, "DOMMouseScroll", function(e) { if (e.axis && e.axis == e.HORIZONTAL_AXIS) { e.wheelX = (e.detail || 0) * 5; e.wheelY = 0; } else { e.wheelX = 0; e.wheelY = (e.detail || 0) * 5; } callback(e); }); } }; exports.addMultiMouseDownListener = function(el, timeouts, eventHandler, callbackName) { var clicks = 0; var startX, startY, timer; var eventNames = { 2: "dblclick", 3: "tripleclick", 4: "quadclick" }; exports.addListener(el, "mousedown", function(e) { if (exports.getButton(e) !== 0) { clicks = 0; } else if (e.detail > 1) { clicks++; if (clicks > 4) clicks = 1; } else { clicks = 1; }<|fim▁hole|> if (!timer || isNewClick) clicks = 1; if (timer) clearTimeout(timer); timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600); if (clicks == 1) { startX = e.clientX; startY = e.clientY; } } e._clicks = clicks; eventHandler[callbackName]("mousedown", e); if (clicks > 4) clicks = 0; else if (clicks > 1) return eventHandler[callbackName](eventNames[clicks], e); }); if (useragent.isOldIE) { exports.addListener(el, "dblclick", function(e) { clicks = 2; if (timer) clearTimeout(timer); timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600); 
eventHandler[callbackName]("mousedown", e); eventHandler[callbackName](eventNames[clicks], e); }); } }; var getModifierHash = useragent.isMac && useragent.isOpera && !("KeyboardEvent" in window) ? function(e) { return 0 | (e.metaKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.ctrlKey ? 8 : 0); } : function(e) { return 0 | (e.ctrlKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.metaKey ? 8 : 0); }; exports.getModifierString = function(e) { return keys.KEY_MODS[getModifierHash(e)]; }; function normalizeCommandKeys(callback, e, keyCode) { var hashId = getModifierHash(e); if (!useragent.isMac && pressedKeys) { if (pressedKeys[91] || pressedKeys[92]) hashId |= 8; if (pressedKeys.altGr) { if ((3 & hashId) != 3) pressedKeys.altGr = 0; else return; } if (keyCode === 18 || keyCode === 17) { var location = "location" in e ? e.location : e.keyLocation; if (keyCode === 17 && location === 1) { if (pressedKeys[keyCode] == 1) ts = e.timeStamp; } else if (keyCode === 18 && hashId === 3 && location === 2) { var dt = e.timeStamp - ts; if (dt < 50) pressedKeys.altGr = true; } } } if (keyCode in keys.MODIFIER_KEYS) { keyCode = -1; } if (hashId & 8 && (keyCode === 91 || keyCode === 93)) { keyCode = -1; } if (!hashId && keyCode === 13) { var location = "location" in e ? e.location : e.keyLocation; if (location === 3) { callback(e, hashId, -keyCode); if (e.defaultPrevented) return; } } if (useragent.isChromeOS && hashId & 8) { callback(e, hashId, keyCode); if (e.defaultPrevented) return; else hashId &= ~8; } // If there is no hashId and the keyCode is not a function key, then // we don't call the callback as we don't handle a command key here // (it's a normal key/character input). 
if (!hashId && !(keyCode in keys.FUNCTION_KEYS) && !(keyCode in keys.PRINTABLE_KEYS)) { return false; } return callback(e, hashId, keyCode); } var pressedKeys = null; var ts = 0; exports.addCommandKeyListener = function(el, callback) { var addListener = exports.addListener; if (useragent.isOldGecko || (useragent.isOpera && !("KeyboardEvent" in window))) { // Old versions of Gecko aka. Firefox < 4.0 didn't repeat the keydown // event if the user pressed the key for a longer time. Instead, the // keydown event was fired once and later on only the keypress event. // To emulate the 'right' keydown behavior, the keyCode of the initial // keyDown event is stored and in the following keypress events the // stores keyCode is used to emulate a keyDown event. var lastKeyDownKeyCode = null; addListener(el, "keydown", function(e) { lastKeyDownKeyCode = e.keyCode; }); addListener(el, "keypress", function(e) { return normalizeCommandKeys(callback, e, lastKeyDownKeyCode); }); } else { var lastDefaultPrevented = null; addListener(el, "keydown", function(e) { pressedKeys[e.keyCode] = (pressedKeys[e.keyCode] || 0) + 1; var result = normalizeCommandKeys(callback, e, e.keyCode); lastDefaultPrevented = e.defaultPrevented; return result; }); addListener(el, "keypress", function(e) { if (lastDefaultPrevented && (e.ctrlKey || e.altKey || e.shiftKey || e.metaKey)) { exports.stopEvent(e); lastDefaultPrevented = null; } }); addListener(el, "keyup", function(e) { pressedKeys[e.keyCode] = null; }); if (!pressedKeys) { pressedKeys = Object.create(null); addListener(window, "focus", function(e) { pressedKeys = Object.create(null); }); } } }; if (window.postMessage && !useragent.isOldIE) { var postMessageId = 1; exports.nextTick = function(callback, win) { win = win || window; var messageName = "zero-timeout-message-" + postMessageId; exports.addListener(win, "message", function listener(e) { if (e.data == messageName) { exports.stopPropagation(e); exports.removeListener(win, "message", 
listener); callback(); } }); win.postMessage(messageName, "*"); }; } exports.nextFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame || window.oRequestAnimationFrame; if (exports.nextFrame) exports.nextFrame = exports.nextFrame.bind(window); else exports.nextFrame = function(callback) { setTimeout(callback, 17); }; });<|fim▁end|>
if (useragent.isIE) { var isNewClick = Math.abs(e.clientX - startX) > 5 || Math.abs(e.clientY - startY) > 5;
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyleft (ↄ) meh. <[email protected]> | http://meh.schizofreni.co // // This file is part of cancer. // // cancer is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // cancer is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with cancer. If not, see <http://www.gnu.org/licenses/>. mod window; pub use self::window::{Window, Request}; mod keyboard; pub use self::keyboard::Keyboard; mod proxy;<|fim▁hole|><|fim▁end|>
pub use self::proxy::Proxy;
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import json import os from errata_tool import ErrataConnector, Erratum from errata_tool.products import ProductList import requests import pytest TESTS_DIR = os.path.dirname(os.path.abspath(__file__)) FIXTURES_DIR = os.path.join(TESTS_DIR, 'fixtures') class MockResponse(object):<|fim▁hole|> def raise_for_status(self): pass @property def _fixture(self): """ Return path to our static fixture file. """ return self.url.replace('https://errata.devel.redhat.com/', os.path.join(FIXTURES_DIR, 'errata.devel.redhat.com/')) def json(self): try: with open(self._fixture) as fp: return json.load(fp) except IOError: print('Try ./new-fixture.sh %s' % self.url) raise @property def text(self): """ Return contents of our static fixture file. """ try: with open(self._fixture) as fp: return fp.read() except IOError: print('Try ./new-fixture.sh %s' % self.url) raise class RequestRecorder(object): """ Record args to requests.get() or requests.post() """ def __call__(self, url, **kwargs): """ mocking requests.get() or requests.post() """ self.response = MockResponse() self.response.url = url self.kwargs = kwargs return self.response @pytest.fixture def mock_get(): return RequestRecorder() @pytest.fixture def mock_post(): return RequestRecorder() @pytest.fixture def mock_put(): return RequestRecorder() @pytest.fixture def advisory(monkeypatch, mock_get): monkeypatch.delattr('requests.sessions.Session.request') monkeypatch.setattr(ErrataConnector, '_auth', None) monkeypatch.setattr(requests, 'get', mock_get) return Erratum(errata_id=26175) @pytest.fixture def rhsa(monkeypatch, mock_get): """ Like the advisory() fixture above, but an RHSA. 
""" monkeypatch.delattr('requests.sessions.Session.request') monkeypatch.setattr(ErrataConnector, '_auth', None) monkeypatch.setattr(requests, 'get', mock_get) return Erratum(errata_id=25856) @pytest.fixture def productlist(monkeypatch, mock_get): monkeypatch.delattr('requests.sessions.Session.request') monkeypatch.setattr(ErrataConnector, '_auth', None) monkeypatch.setattr(requests, 'get', mock_get) return ProductList()<|fim▁end|>
status_code = 200 encoding = 'utf-8' headers = {'content-type': 'application/json; charset=utf-8'}
<|file_name|>test_problem_generator.py<|end_file_name|><|fim▁begin|>import unittest from ai_graph_color import line, problem_generator class TestProblemGenerator(unittest.TestCase): def test_generate_file_path(self): """ Tests create file """ file_names = ['test.json', ''] for file_name in file_names: file_path = problem_generator.generate_file_path(file_name) self.assertEqual(type(''), type(file_path)) self.assertEqual('problems/{}'.format(file_name), file_path) def test_read_and_write_graph_to_file(self): """ Tests write graph to file """ num_verts = [0, 5, 100] for index, num_vert in enumerate(num_verts): graph = problem_generator.generate_graph(num_vert) problem_generator.write_graph_to_file( 'test{}.json'.format(index), graph ) self.assertEqual( graph, problem_generator.read_graph_from_file( 'test{}.json'.format(index) ) ) def test_generate_graph(self): """ Tests generate graph """ num_verts = [0, 5, 100] for num_vert in num_verts: graph = problem_generator.generate_graph(num_vert) self.assertEquals( num_vert, len(graph) ) for connections in graph: self.assertGreater( len(connections), 0 ) def test_build_graph(self): """ Tests build graph """ points = [ [(0, 0), (0, 0)], [], [(100, 100), (1000, 1000)] ] # This will need to change once build_graph is implemented for point in points: graph = problem_generator.build_graph(point) self.assertEquals( len(point), len(graph) ) for connections in graph: self.assertGreater( len(connections), 0 ) def test_scatter_points(self): """ Tests scatter points """ num_points = [0, 1, 100] for num_point in num_points: self.assertEqual( num_point, len(problem_generator.scatter_points(num_point)) ) def test_create_lines(self): """ Tests certain properties hold for the lines-map on sample points: - The points indexed by a line's key are the same as the points listed in the line - The distance calculated in a mapped line matches the distance between the points indexed by that line's key - The line can be freed without exception """ 
points = [(0.0, 0.0), (0.0, 3.0), (1.0, 1.0), (1.0, 5.0)] lines = problem_generator.create_lines(points) for pair, connecting_line in lines.items(): distance = line.point_distance( *map(lambda i: points[i], pair) )<|fim▁hole|> self.assertAlmostEqual(distance, connecting_line.distance) self.assertEqual( frozenset(map(lambda i: points[i], pair)), frozenset([connecting_line.left_point, connecting_line.right_point]) ) connecting_line.free() # should not raise any errors def test_create_distance_list(self): """ Tests certain properties hold for the distance list: - Lines in a linked-list are ordered by distance - Each line described in a list uses the correct indexes - Freeing a line removes it from the line-map and both of the linked-lists in the distance-list """ points = [(0.0, 0.0), (0.0, 3.0), (1.0, 1.0), (1.0, 5.0)] lines = problem_generator.create_lines(points) distance_list = problem_generator.create_distance_list( lines, len(points) ) for src_index, connections in enumerate(distance_list): distances = map(lambda i: i[1].distance, connections) self.assertTrue(all( distances[i] <= distances[i + 1] for i in xrange(len(distances) - 1) )) for dst_index, connecting_line in connections: self.assertEqual( frozenset(map(lambda i: points[i], [src_index, dst_index])), frozenset([connecting_line.left_point, connecting_line.right_point]) ) for connections in distance_list: for other_index, connecting_line in connections: first_list_size = len(connections) other_list_size = len(distance_list[other_index]) lines_size = len(lines) connecting_line.free() self.assertEqual(first_list_size - 1, len(connections)) self.assertEqual(other_list_size - 1, len(distance_list[other_index])) self.assertEqual(lines_size - 1, len(lines)) self.assertEquals(0, len(connections)) self.assertEquals(0, len(lines))<|fim▁end|>
<|file_name|>local.go<|end_file_name|><|fim▁begin|>package local import ( "errors" "fmt" "io" "io/ioutil" "os" "path/filepath" "sort" "github.com/restic/restic/backend" ) var ErrWrongData = errors.New("wrong data returned by backend, checksum does not match") type Local struct { p string } // Open opens the local backend at dir. func Open(dir string) (*Local, error) { items := []string{ dir, filepath.Join(dir, backend.Paths.Data), filepath.Join(dir, backend.Paths.Snapshots), filepath.Join(dir, backend.Paths.Index), filepath.Join(dir, backend.Paths.Locks), filepath.Join(dir, backend.Paths.Keys), filepath.Join(dir, backend.Paths.Temp), } // test if all necessary dirs are there for _, d := range items { if _, err := os.Stat(d); err != nil { return nil, fmt.Errorf("%s does not exist", d) } } return &Local{p: dir}, nil } // Create creates all the necessary files and directories for a new local // backend at dir. Afterwards a new config blob should be created. func Create(dir string) (*Local, error) { dirs := []string{ dir, filepath.Join(dir, backend.Paths.Data), filepath.Join(dir, backend.Paths.Snapshots), filepath.Join(dir, backend.Paths.Index), filepath.Join(dir, backend.Paths.Locks), filepath.Join(dir, backend.Paths.Keys), filepath.Join(dir, backend.Paths.Temp), } // test if config file already exists _, err := os.Lstat(backend.Paths.Config) if err == nil { return nil, errors.New("config file already exists") } // test if directories already exist for _, d := range dirs[1:] { if _, err := os.Stat(d); err == nil { return nil, fmt.Errorf("dir %s already exists", d) } } // create paths for data, refs and temp for _, d := range dirs { err := os.MkdirAll(d, backend.Modes.Dir) if err != nil { return nil, err } } // open backend return Open(dir) } // Location returns this backend's location (the directory name). func (b *Local) Location() string { return b.p } // Return temp directory in correct directory for this backend. 
func (b *Local) tempFile() (*os.File, error) { return ioutil.TempFile(filepath.Join(b.p, backend.Paths.Temp), "temp-") } type localBlob struct { f *os.File size uint final bool basedir string } func (lb *localBlob) Write(p []byte) (int, error) { if lb.final { return 0, errors.New("blob already closed") } n, err := lb.f.Write(p) lb.size += uint(n) return n, err } func (lb *localBlob) Size() uint { return lb.size } func (lb *localBlob) Finalize(t backend.Type, name string) error { if lb.final { return errors.New("Already finalized") } lb.final = true err := lb.f.Close() if err != nil { return fmt.Errorf("local: file.Close: %v", err) } f := filename(lb.basedir, t, name) // create directories if necessary, ignore errors if t == backend.Data { os.MkdirAll(filepath.Dir(f), backend.Modes.Dir) } // test if new path already exists if _, err := os.Stat(f); err == nil { return fmt.Errorf("Close(): file %v already exists", f) } if err := os.Rename(lb.f.Name(), f); err != nil { return err } // set mode to read-only fi, err := os.Stat(f) if err != nil { return err }<|fim▁hole|> return os.Chmod(f, fi.Mode()&os.FileMode(^uint32(0222))) } // Create creates a new Blob. The data is available only after Finalize() // has been called on the returned Blob. func (b *Local) Create() (backend.Blob, error) { // TODO: make sure that tempfile is removed upon error // create tempfile in backend file, err := b.tempFile() if err != nil { return nil, err } blob := localBlob{ f: file, basedir: b.p, } return &blob, nil } // Construct path for given Type and name. func filename(base string, t backend.Type, name string) string { if t == backend.Config { return filepath.Join(base, "config") } return filepath.Join(dirname(base, t, name), name) } // Construct directory for given Type. 
func dirname(base string, t backend.Type, name string) string { var n string switch t { case backend.Data: n = backend.Paths.Data if len(name) > 2 { n = filepath.Join(n, name[:2]) } case backend.Snapshot: n = backend.Paths.Snapshots case backend.Index: n = backend.Paths.Index case backend.Lock: n = backend.Paths.Locks case backend.Key: n = backend.Paths.Keys } return filepath.Join(base, n) } // Get returns a reader that yields the content stored under the given // name. The reader should be closed after draining it. func (b *Local) Get(t backend.Type, name string) (io.ReadCloser, error) { return os.Open(filename(b.p, t, name)) } // GetReader returns an io.ReadCloser for the Blob with the given name of // type t at offset and length. If length is 0, the reader reads until EOF. func (b *Local) GetReader(t backend.Type, name string, offset, length uint) (io.ReadCloser, error) { f, err := os.Open(filename(b.p, t, name)) if err != nil { return nil, err } _, err = f.Seek(int64(offset), 0) if err != nil { return nil, err } if length == 0 { return f, nil } return backend.LimitReadCloser(f, int64(length)), nil } // Test returns true if a blob of the given type and name exists in the backend. func (b *Local) Test(t backend.Type, name string) (bool, error) { _, err := os.Stat(filename(b.p, t, name)) if err != nil { if os.IsNotExist(err) { return false, nil } return false, err } return true, nil } // Remove removes the blob with the given name and type. func (b *Local) Remove(t backend.Type, name string) error { return os.Remove(filename(b.p, t, name)) } // List returns a channel that yields all names of blobs of type t. A // goroutine is started for this. If the channel done is closed, sending // stops. 
func (b *Local) List(t backend.Type, done <-chan struct{}) <-chan string { // TODO: use os.Open() and d.Readdirnames() instead of Glob() var pattern string if t == backend.Data { pattern = filepath.Join(dirname(b.p, t, ""), "*", "*") } else { pattern = filepath.Join(dirname(b.p, t, ""), "*") } ch := make(chan string) matches, err := filepath.Glob(pattern) if err != nil { close(ch) return ch } for i := range matches { matches[i] = filepath.Base(matches[i]) } sort.Strings(matches) go func() { defer close(ch) for _, m := range matches { if m == "" { continue } select { case ch <- m: case <-done: return } } }() return ch } // Delete removes the repository and all files. func (b *Local) Delete() error { return os.RemoveAll(b.p) } // Close does nothing func (b *Local) Close() error { return nil }<|fim▁end|>
<|file_name|>moc_cookiejar.cpp<|end_file_name|><|fim▁begin|>/**************************************************************************** ** Meta object code from reading C++ file 'cookiejar.h' ** ** Created: Fri May 7 07:20:45 2010 ** by: The Qt Meta Object Compiler version 62 (Qt 4.6.2) ** ** WARNING! All changes made in this file will be lost! *****************************************************************************/ #include "../cookiejar.h" #if !defined(Q_MOC_OUTPUT_REVISION) #error "The header file 'cookiejar.h' doesn't include <QObject>." #elif Q_MOC_OUTPUT_REVISION != 62 #error "This file was generated using the moc from 4.6.2. It" #error "cannot be used with the include files from this version of Qt." #error "(The moc has changed too much.)" #endif QT_BEGIN_MOC_NAMESPACE static const uint qt_meta_data_CookieJar[] = { // content: 4, // revision 0, // classname 0, 0, // classinfo 4, 14, // methods 5, 34, // properties 2, 49, // enums/sets 0, 0, // constructors 0, // flags 1, // signalCount // signals: signature, parameters, type, tag, flags 11, 10, 10, 10, 0x05, // slots: signature, parameters, type, tag, flags 28, 10, 10, 10, 0x0a, 36, 10, 10, 10, 0x0a, 51, 10, 10, 10, 0x08, // properties: name, type, flags 71, 58, 0x0009510b, 95, 84, 0x0009510b, 118, 106, 0x0b095103, 133, 106, 0x0b095103, 148, 106, 0x0b095103, // enums: name, flags, count, data 58, 0x0, 3, 57, 84, 0x0, 3, 63, // enum data: key, value 171, uint(CookieJar::AcceptAlways), 184, uint(CookieJar::AcceptNever), 196, uint(CookieJar::AcceptOnlyFromSitesNavigatedTo), 227, uint(CookieJar::KeepUntilExpire), 243, uint(CookieJar::KeepUntilExit), 257, uint(CookieJar::KeepUntilTimeLimit), 0 // eod }; static const char qt_meta_stringdata_CookieJar[] = { "CookieJar\0\0cookiesChanged()\0clear()\0" "loadSettings()\0save()\0AcceptPolicy\0" "acceptPolicy\0KeepPolicy\0keepPolicy\0" "QStringList\0blockedCookies\0allowedCookies\0" "allowForSessionCookies\0AcceptAlways\0" 
"AcceptNever\0AcceptOnlyFromSitesNavigatedTo\0" "KeepUntilExpire\0KeepUntilExit\0" "KeepUntilTimeLimit\0" }; const QMetaObject CookieJar::staticMetaObject = { { &QNetworkCookieJar::staticMetaObject, qt_meta_stringdata_CookieJar, qt_meta_data_CookieJar, 0 } }; #ifdef Q_NO_DATA_RELOCATION const QMetaObject &CookieJar::getStaticMetaObject() { return staticMetaObject; } #endif //Q_NO_DATA_RELOCATION const QMetaObject *CookieJar::metaObject() const { return QObject::d_ptr->metaObject ? QObject::d_ptr->metaObject : &staticMetaObject; } void *CookieJar::qt_metacast(const char *_clname) { if (!_clname) return 0; if (!strcmp(_clname, qt_meta_stringdata_CookieJar)) return static_cast<void*>(const_cast< CookieJar*>(this)); return QNetworkCookieJar::qt_metacast(_clname); } int CookieJar::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QNetworkCookieJar::qt_metacall(_c, _id, _a); if (_id < 0) return _id; if (_c == QMetaObject::InvokeMetaMethod) { switch (_id) { case 0: cookiesChanged(); break; case 1: clear(); break; case 2: loadSettings(); break; case 3: save(); break; default: ; } _id -= 4; } #ifndef QT_NO_PROPERTIES else if (_c == QMetaObject::ReadProperty) { void *_v = _a[0]; switch (_id) { case 0: *reinterpret_cast< AcceptPolicy*>(_v) = acceptPolicy(); break; case 1: *reinterpret_cast< KeepPolicy*>(_v) = keepPolicy(); break; case 2: *reinterpret_cast< QStringList*>(_v) = blockedCookies(); break; case 3: *reinterpret_cast< QStringList*>(_v) = allowedCookies(); break; case 4: *reinterpret_cast< QStringList*>(_v) = allowForSessionCookies(); break; } _id -= 5; } else if (_c == QMetaObject::WriteProperty) { void *_v = _a[0]; switch (_id) { case 0: setAcceptPolicy(*reinterpret_cast< AcceptPolicy*>(_v)); break; case 1: setKeepPolicy(*reinterpret_cast< KeepPolicy*>(_v)); break; case 2: setBlockedCookies(*reinterpret_cast< QStringList*>(_v)); break; case 3: setAllowedCookies(*reinterpret_cast< QStringList*>(_v)); break; case 4: 
setAllowForSessionCookies(*reinterpret_cast< QStringList*>(_v)); break; } _id -= 5; } else if (_c == QMetaObject::ResetProperty) { _id -= 5; } else if (_c == QMetaObject::QueryPropertyDesignable) { _id -= 5; } else if (_c == QMetaObject::QueryPropertyScriptable) { _id -= 5; } else if (_c == QMetaObject::QueryPropertyStored) { _id -= 5; } else if (_c == QMetaObject::QueryPropertyEditable) { _id -= 5; } else if (_c == QMetaObject::QueryPropertyUser) { _id -= 5; } #endif // QT_NO_PROPERTIES return _id; } // SIGNAL 0 void CookieJar::cookiesChanged() { QMetaObject::activate(this, &staticMetaObject, 0, 0); } static const uint qt_meta_data_CookieModel[] = { // content: 4, // revision 0, // classname 0, 0, // classinfo 1, 14, // methods 0, 0, // properties 0, 0, // enums/sets 0, 0, // constructors 0, // flags 0, // signalCount // slots: signature, parameters, type, tag, flags 13, 12, 12, 12, 0x08, 0 // eod }; static const char qt_meta_stringdata_CookieModel[] = { "CookieModel\0\0cookiesChanged()\0" }; const QMetaObject CookieModel::staticMetaObject = { { &QAbstractTableModel::staticMetaObject, qt_meta_stringdata_CookieModel, qt_meta_data_CookieModel, 0 } }; #ifdef Q_NO_DATA_RELOCATION const QMetaObject &CookieModel::getStaticMetaObject() { return staticMetaObject; } #endif //Q_NO_DATA_RELOCATION const QMetaObject *CookieModel::metaObject() const { return QObject::d_ptr->metaObject ? 
QObject::d_ptr->metaObject : &staticMetaObject; } void *CookieModel::qt_metacast(const char *_clname) { if (!_clname) return 0; if (!strcmp(_clname, qt_meta_stringdata_CookieModel)) return static_cast<void*>(const_cast< CookieModel*>(this)); return QAbstractTableModel::qt_metacast(_clname); } int CookieModel::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QAbstractTableModel::qt_metacall(_c, _id, _a); if (_id < 0) return _id; if (_c == QMetaObject::InvokeMetaMethod) { switch (_id) { case 0: cookiesChanged(); break; default: ; } _id -= 1; } return _id; } static const uint qt_meta_data_CookiesDialog[] = { // content: 4, // revision 0, // classname 0, 0, // classinfo 0, 0, // methods 0, 0, // properties 0, 0, // enums/sets 0, 0, // constructors 0, // flags 0, // signalCount 0 // eod }; static const char qt_meta_stringdata_CookiesDialog[] = { "CookiesDialog\0" }; const QMetaObject CookiesDialog::staticMetaObject = { { &QDialog::staticMetaObject, qt_meta_stringdata_CookiesDialog, qt_meta_data_CookiesDialog, 0 } }; #ifdef Q_NO_DATA_RELOCATION const QMetaObject &CookiesDialog::getStaticMetaObject() { return staticMetaObject; } #endif //Q_NO_DATA_RELOCATION const QMetaObject *CookiesDialog::metaObject() const { return QObject::d_ptr->metaObject ? 
QObject::d_ptr->metaObject : &staticMetaObject; } void *CookiesDialog::qt_metacast(const char *_clname) { if (!_clname) return 0; if (!strcmp(_clname, qt_meta_stringdata_CookiesDialog)) return static_cast<void*>(const_cast< CookiesDialog*>(this)); if (!strcmp(_clname, "Ui_CookiesDialog")) return static_cast< Ui_CookiesDialog*>(const_cast< CookiesDialog*>(this)); return QDialog::qt_metacast(_clname); } int CookiesDialog::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QDialog::qt_metacall(_c, _id, _a); if (_id < 0) return _id; return _id; } static const uint qt_meta_data_CookieExceptionsModel[] = { // content: 4, // revision 0, // classname 0, 0, // classinfo 0, 0, // methods 0, 0, // properties 0, 0, // enums/sets 0, 0, // constructors 0, // flags 0, // signalCount 0 // eod }; static const char qt_meta_stringdata_CookieExceptionsModel[] = { "CookieExceptionsModel\0" }; const QMetaObject CookieExceptionsModel::staticMetaObject = { { &QAbstractTableModel::staticMetaObject, qt_meta_stringdata_CookieExceptionsModel, qt_meta_data_CookieExceptionsModel, 0 } }; #ifdef Q_NO_DATA_RELOCATION const QMetaObject &CookieExceptionsModel::getStaticMetaObject() { return staticMetaObject; } #endif //Q_NO_DATA_RELOCATION <|fim▁hole|> return QObject::d_ptr->metaObject ? 
QObject::d_ptr->metaObject : &staticMetaObject; } void *CookieExceptionsModel::qt_metacast(const char *_clname) { if (!_clname) return 0; if (!strcmp(_clname, qt_meta_stringdata_CookieExceptionsModel)) return static_cast<void*>(const_cast< CookieExceptionsModel*>(this)); return QAbstractTableModel::qt_metacast(_clname); } int CookieExceptionsModel::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QAbstractTableModel::qt_metacall(_c, _id, _a); if (_id < 0) return _id; return _id; } static const uint qt_meta_data_CookiesExceptionsDialog[] = { // content: 4, // revision 0, // classname 0, 0, // classinfo 4, 14, // methods 0, 0, // properties 0, 0, // enums/sets 0, 0, // constructors 0, // flags 0, // signalCount // slots: signature, parameters, type, tag, flags 25, 24, 24, 24, 0x08, 33, 24, 24, 24, 0x08, 41, 24, 24, 24, 0x08, 64, 59, 24, 24, 0x08, 0 // eod }; static const char qt_meta_stringdata_CookiesExceptionsDialog[] = { "CookiesExceptionsDialog\0\0block()\0" "allow()\0allowForSession()\0text\0" "textChanged(QString)\0" }; const QMetaObject CookiesExceptionsDialog::staticMetaObject = { { &QDialog::staticMetaObject, qt_meta_stringdata_CookiesExceptionsDialog, qt_meta_data_CookiesExceptionsDialog, 0 } }; #ifdef Q_NO_DATA_RELOCATION const QMetaObject &CookiesExceptionsDialog::getStaticMetaObject() { return staticMetaObject; } #endif //Q_NO_DATA_RELOCATION const QMetaObject *CookiesExceptionsDialog::metaObject() const { return QObject::d_ptr->metaObject ? 
QObject::d_ptr->metaObject : &staticMetaObject; } void *CookiesExceptionsDialog::qt_metacast(const char *_clname) { if (!_clname) return 0; if (!strcmp(_clname, qt_meta_stringdata_CookiesExceptionsDialog)) return static_cast<void*>(const_cast< CookiesExceptionsDialog*>(this)); if (!strcmp(_clname, "Ui_CookiesExceptionsDialog")) return static_cast< Ui_CookiesExceptionsDialog*>(const_cast< CookiesExceptionsDialog*>(this)); return QDialog::qt_metacast(_clname); } int CookiesExceptionsDialog::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QDialog::qt_metacall(_c, _id, _a); if (_id < 0) return _id; if (_c == QMetaObject::InvokeMetaMethod) { switch (_id) { case 0: block(); break; case 1: allow(); break; case 2: allowForSession(); break; case 3: textChanged((*reinterpret_cast< const QString(*)>(_a[1]))); break; default: ; } _id -= 4; } return _id; } QT_END_MOC_NAMESPACE<|fim▁end|>
const QMetaObject *CookieExceptionsModel::metaObject() const {
<|file_name|>FootpathRemoveAction.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************** * Copyright (c) 2014-2020 OpenRCT2 developers * * For a complete list of all authors, please refer to contributors.md * Interested in contributing? Visit https://github.com/OpenRCT2/OpenRCT2 * * OpenRCT2 is licensed under the GNU General Public License version 3. *****************************************************************************/ #include "FootpathRemoveAction.h" #include "../Cheats.h" #include "../OpenRCT2.h" #include "../core/MemoryStream.h" #include "../interface/Window.h" #include "../localisation/StringIds.h" #include "../management/Finance.h" #include "../world/Footpath.h" #include "../world/Location.hpp" #include "../world/Park.h" #include "../world/Wall.h" #include "BannerRemoveAction.h" FootpathRemoveAction::FootpathRemoveAction(const CoordsXYZ& location) : _loc(location) { } void FootpathRemoveAction::AcceptParameters(GameActionParameterVisitor& visitor) { visitor.Visit(_loc); } uint16_t FootpathRemoveAction::GetActionFlags() const { return GameAction::GetActionFlags(); } void FootpathRemoveAction::Serialise(DataSerialiser& stream) { GameAction::Serialise(stream); stream << DS_TAG(_loc); } GameActions::Result::Ptr FootpathRemoveAction::Query() const { GameActions::Result::Ptr res = std::make_unique<GameActions::Result>(); res->Cost = 0; res->Expenditure = ExpenditureType::Landscaping; res->Position = { _loc.x + 16, _loc.y + 16, _loc.z }; if (!LocationValid(_loc)) { return MakeResult(GameActions::Status::NotOwned, STR_CANT_REMOVE_FOOTPATH_FROM_HERE, STR_LAND_NOT_OWNED_BY_PARK); } if (!((gScreenFlags & SCREEN_FLAGS_SCENARIO_EDITOR) || gCheatsSandboxMode) && !map_is_location_owned(_loc)) { return MakeResult(GameActions::Status::NotOwned, STR_CANT_REMOVE_FOOTPATH_FROM_HERE, STR_LAND_NOT_OWNED_BY_PARK); } TileElement* footpathElement = GetFootpathElement(); if (footpathElement == nullptr) { return 
MakeResult(GameActions::Status::InvalidParameters, STR_CANT_REMOVE_FOOTPATH_FROM_HERE); } res->Cost = GetRefundPrice(footpathElement); return res; } GameActions::Result::Ptr FootpathRemoveAction::Execute() const { GameActions::Result::Ptr res = std::make_unique<GameActions::Result>(); res->Cost = 0; res->Expenditure = ExpenditureType::Landscaping; res->Position = { _loc.x + 16, _loc.y + 16, _loc.z }; if (!(GetFlags() & GAME_COMMAND_FLAG_GHOST)) { footpath_interrupt_peeps(_loc); footpath_remove_litter(_loc); } TileElement* footpathElement = GetFootpathElement(); if (footpathElement != nullptr) { footpath_queue_chain_reset(); auto bannerRes = RemoveBannersAtElement(_loc, footpathElement); if (bannerRes->Error == GameActions::Status::Ok) { res->Cost += bannerRes->Cost; } footpath_remove_edges_at(_loc, footpathElement); map_invalidate_tile_full(_loc); tile_element_remove(footpathElement); footpath_update_queue_chains(); // Remove the spawn point (if there is one in the current tile) gPeepSpawns.erase( std::remove_if( gPeepSpawns.begin(), gPeepSpawns.end(), [this](const CoordsXYZ& spawn) { { return spawn.ToTileStart() == _loc.ToTileStart(); } }), gPeepSpawns.end()); } else { return MakeResult(GameActions::Status::InvalidParameters, STR_CANT_REMOVE_FOOTPATH_FROM_HERE); } res->Cost += GetRefundPrice(footpathElement); return res; } TileElement* FootpathRemoveAction::GetFootpathElement() const { bool getGhostPath = GetFlags() & GAME_COMMAND_FLAG_GHOST; TileElement* tileElement = map_get_footpath_element(_loc); TileElement* footpathElement = nullptr; if (tileElement != nullptr) { if (getGhostPath && !tileElement->IsGhost()) { while (!(tileElement++)->IsLastForTile()) { if (tileElement->GetType() != TILE_ELEMENT_TYPE_PATH && !tileElement->IsGhost()) { continue; } footpathElement = tileElement; break; } } else { footpathElement = tileElement; } } return footpathElement; } money32 FootpathRemoveAction::GetRefundPrice(TileElement* footpathElement) const { money32 cost = 
-MONEY(10, 00); return cost; } /** * * rct2: 0x006BA23E */ GameActions::Result::Ptr FootpathRemoveAction::RemoveBannersAtElement(const CoordsXY& loc, TileElement* tileElement) const { auto result = MakeResult();<|fim▁hole|> if (tileElement->GetType() == TILE_ELEMENT_TYPE_PATH) return result; if (tileElement->GetType() != TILE_ELEMENT_TYPE_BANNER) continue; auto bannerRemoveAction = BannerRemoveAction({ loc, tileElement->GetBaseZ(), tileElement->AsBanner()->GetPosition() }); bool isGhost = tileElement->IsGhost(); auto bannerFlags = GetFlags() | (isGhost ? static_cast<uint32_t>(GAME_COMMAND_FLAG_GHOST) : 0); bannerRemoveAction.SetFlags(bannerFlags); auto res = GameActions::ExecuteNested(&bannerRemoveAction); // Ghost removal is free if (res->Error == GameActions::Status::Ok && !isGhost) { result->Cost += res->Cost; } tileElement--; } return result; }<|fim▁end|>
while (!(tileElement++)->IsLastForTile()) {
<|file_name|>IAppJiyoujiaHeadDao.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import cn.cloudself.model.AppJiyoujiaHeadEntity; import cn.cloudself.model.IntegerEntity; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.repository.Repository; import java.util.List; /** * @author HerbLuo * @version 1.0.0.d * <p> * change logs: * 2017/5/19 HerbLuo 首次创建 */ public interface IAppJiyoujiaHeadDao extends Repository<AppJiyoujiaHeadEntity, Integer> { List<AppJiyoujiaHeadEntity> getDoubleColumn(int start, int length); /** * Max of 各类型(type 放置于左边还是右边)的记录数 * 如:type为0的记录数有3个,type为1的记录数有4个,返回结果就为4 */ IntegerEntity maxCountOfDoubleColumn(); Page<AppJiyoujiaHeadEntity> findByType(byte type, Pageable pageable); }<|fim▁end|>
package cn.cloudself.dao;
<|file_name|>hr_payroll_pay_commission.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Original Module by SIESA (<http://www.siesacr.com>) # Refactored by CLEARCORP S.A. (<http://clearcorp.co.cr>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # license, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. ############################################################################## from openerp.osv import osv, fields class Payment(osv.Model): """Commissions Payroll Payment""" _name = 'hr.payroll.pay.commission.payment' _description = __doc__ def _check_amount_paid(self, cr, uid, ids, context=None):<|fim▁hole|> return True _columns = { 'commission_id': fields.many2one('sale.commission.commission', string='Commission'), 'invoice_id': fields.related('commission_id', 'invoice_id', type='many2one', obj='account.invoice', string='Invoice', readonly=True), 'input_id': fields.many2one('hr.payslip.input', ondelete='restrict', string='Input'), 'slip_id':fields.related('input_id', 'payslip_id', type='many2one', string='Payslip', obj='hr.payslip', readonly=True, store=True), 'amount_paid': fields.float('Amount Paid', digits=(16,2)), } _constraints = [(_check_amount_paid, 'Value must be greater or equal than 0.', ['amount_paid'])]<|fim▁end|>
for payment in self.browse(cr, uid, ids, context=context): if payment.amount_paid <= 0.0: return False
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>// URI - 1195 // Implementar um TAD ABB (Arvore Binaria de Busca) com as funcoes de insercao e liberacao #include <iostream> #include <cstdio> using namespace std; // Estrutura da ABB struct Arv { int valor; Arv *esq; Arv *dir; }typedef Arv; // Funcoes da ABB Arv* init (); Arv* busca (Arv *r, int v); Arv* insere (Arv *r, int v); bool vazia (Arv *r); Arv* libera (Arv *r); void imprimePos (Arv *r); void imprimeIn (Arv *r); void imprimePre (Arv *r); // Inicializa a ABB Arv* init () { return NULL; } Arv* insere (Arv *r, int valor) { // ABB esta vazia if (r == NULL) { r = new Arv(); r->valor = valor; r->esq = NULL; r->dir = NULL; } // ABB ja possui elementos // Valor eh menor que o da raiz, procurar na sub.arvore esquerda else if (valor < r->valor) r->esq = insere(r->esq,valor); // Valor eh maior que o da raiz, procurar na sub.arvore direita else r->dir = insere(r->dir,valor); return r; } // Imprime em Pre-Ordem void imprimePre (Arv *r) { if (r != NULL) { printf(" %d",r->valor); imprimePre(r->esq); imprimePre(r->dir); } } // Imprime em In-Ordem void imprimeIn (Arv *r) { if (r != NULL) { imprimeIn(r->esq); printf(" %d",r->valor); imprimeIn(r->dir); } } // Imprime em Pos-Ordem<|fim▁hole|> imprimePos(r->esq); imprimePos(r->dir); printf(" %d",r->valor); } } bool vazia (Arv *r) { if (r == NULL) return true; else return false; } Arv* libera (Arv *r) { if (!vazia(r)) { r->esq = libera(r->esq); r->dir = libera(r->dir); delete r; } return NULL; } int main () { int K, N; int valor; scanf("%d",&K); for (int k = 0; k < K; k++) { // Montar a arvore Arv *arv = init(); scanf("%d",&N); for (int i = 0; i < N; i++) { scanf("%d",&valor); arv = insere(arv,valor); } // Imprime a arvore de acordo com a saida printf("Case %d:\n",k+1); printf("Pre.:"); imprimePre(arv); printf("\n"); printf("In..:"); imprimeIn(arv); printf("\n"); printf("Post:"); imprimePos(arv); printf("\n"); // Libera arvore para a proxima entrada libera(arv); printf("\n"); } 
}<|fim▁end|>
void imprimePos (Arv *r) { if (r != NULL) {
<|file_name|>InteractionObjectFactory.js<|end_file_name|><|fim▁begin|>// Copyright 2017 The Oppia Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Factory for creating new frontend instances of Interaction * domain objects. */ oppia.factory('InteractionObjectFactory', [ 'AnswerGroupObjectFactory', 'HintObjectFactory', 'OutcomeObjectFactory', 'SolutionObjectFactory', function( AnswerGroupObjectFactory, HintObjectFactory, OutcomeObjectFactory, SolutionObjectFactory) { var Interaction = function( answerGroups, confirmedUnclassifiedAnswers, customizationArgs, defaultOutcome, hints, id, solution) { this.answerGroups = answerGroups; this.confirmedUnclassifiedAnswers = confirmedUnclassifiedAnswers; this.customizationArgs = customizationArgs; this.defaultOutcome = defaultOutcome; this.hints = hints; this.id = id; this.solution = solution; }; Interaction.prototype.toBackendDict = function() { return { answer_groups: this.answerGroups.map(function(answerGroup) { return answerGroup.toBackendDict(); }), confirmed_unclassified_answers: this.confirmedUnclassifiedAnswers, customization_args: this.customizationArgs, default_outcome: this.defaultOutcome ? this.defaultOutcome.toBackendDict() : null, hints: this.hints.map(function(hint) { return hint.toBackendDict(); }), id: this.id, solution: this.solution ? 
this.solution.toBackendDict() : null }; }; Interaction.createFromBackendDict = function(interactionDict) {<|fim▁hole|> if (interactionDict.default_outcome) { defaultOutcome = OutcomeObjectFactory.createFromBackendDict( interactionDict.default_outcome); } else { defaultOutcome = null; } return new Interaction( generateAnswerGroupsFromBackend(interactionDict.answer_groups), interactionDict.confirmed_unclassified_answers, interactionDict.customization_args, defaultOutcome, generateHintsFromBackend(interactionDict.hints), interactionDict.id, interactionDict.solution ? ( generateSolutionFromBackend(interactionDict.solution)) : null); }; var generateAnswerGroupsFromBackend = function(answerGroupBackendDicts) { return answerGroupBackendDicts.map(function( answerGroupBackendDict) { return AnswerGroupObjectFactory.createFromBackendDict( answerGroupBackendDict); }); }; var generateHintsFromBackend = function(hintBackendDicts) { return hintBackendDicts.map(function(hintBackendDict) { return HintObjectFactory.createFromBackendDict(hintBackendDict); }); }; var generateSolutionFromBackend = function(solutionBackendDict) { return SolutionObjectFactory.createFromBackendDict(solutionBackendDict); }; return Interaction; } ]);<|fim▁end|>
var defaultOutcome;
<|file_name|>ui.rs<|end_file_name|><|fim▁begin|>extern crate rustbox; use self::rustbox::{Color, Style, RustBox}; use super::board::{Board, HEIGHT, WIDTH}; use super::tetromino::{Tetromino, TetrominoType}; use super::window::Window; // Default scaling factor for the board const SCALE: usize = 2; // Default values for styling terminal output const DEFAULT_STYLE: Style = rustbox::RB_NORMAL; const DEFAULT_FG: Color = Color::White; const DEFAULT_BG: Color = Color::Black; /// A collection of Window structs representing the user interface pub struct Ui<'a> { board: Window<'a>, score: Window<'a>, level: Window<'a>, lines: Window<'a>, next: Window<'a>, hold: Window<'a>, } impl<'a> Ui<'a> { /// Initializes a new Ui struct pub fn new(rb: &'a RustBox) -> Self { Ui { board: Window::new(0, 5, (11 * SCALE) - 1, 21, rb), score: Window::new(12 * SCALE, 6, 11, 1, rb), level: Window::new(12 * SCALE, 10, 11, 1, rb), lines: Window::new(12 * SCALE, 14, 11, 1, rb), next: Window::new(5 , 1, (5 * SCALE) + 1, 4, rb), hold: Window::new(12 * SCALE, 18, (5 * SCALE) + 1, 5, rb), } } /// Setup the default elements of the user interface pub fn setup(&self) { self.board.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG); self.next.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG); self.hold.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG); self.print_score(0); self.print_level(0); self.print_lines(0); } /// Resets some of the user interface elements pub fn reset(&self) { self.score.clear(); self.level.clear(); self.lines.clear(); self.print_score(0); self.print_level(0); self.print_lines(0); } /// Print the state of the board pub fn print_board(&self, board: &Board) { // Start at 2 because only 20 of the board's rows should be displayed for y in 2..HEIGHT { for x in 0..WIDTH { match board.field()[y][x] { // When printing the board, offset x and y to compensate // for the Window's borders and showing only 20 rows Some(ref mino) => { let color = self.get_tetromino_color(mino); let rune = 
self.get_tetromino_rune(mino); self.board.print_char((x * SCALE) + 1, y - 1, DEFAULT_STYLE, color, DEFAULT_BG, rune); self.board.print_char((x * SCALE) + 2, y - 1, DEFAULT_STYLE, color, DEFAULT_BG, rune); } None => { self.board.print_char((x * SCALE) + 1, y - 1, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, ' '); self.board.print_char((x * SCALE) + 2, y - 1, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, '.'); }, } } } } /// Gets the character associated with a TetrominoType fn get_tetromino_rune(&self, tetromino_type: &TetrominoType) -> char {<|fim▁hole|> match tetromino_type { &TetrominoType::Ghost => '□', _ => '■', } } /// Gets the color associated with a TetrominoType fn get_tetromino_color(&self, tetromino_type: &TetrominoType) -> Color { match tetromino_type { &TetrominoType::I => Color::Cyan, &TetrominoType::J => Color::Blue, &TetrominoType::L | &TetrominoType::Ghost => Color::White, &TetrominoType::O => Color::Yellow, &TetrominoType::S => Color::Green, &TetrominoType::T => Color::Magenta, &TetrominoType::Z => Color::Red, } } /// Prints the next Tetromino pub fn print_next(&self, tetromino: Tetromino) { self.print_tetromino(tetromino, &self.next); } /// Prints the hold Tetromino pub fn print_hold(&self, hold: Option<Tetromino>) { if let Some(tetromino) = hold { self.print_tetromino(tetromino, &self.hold); } } // Prints a Tetromino to a specified Window fn print_tetromino(&self, tetromino: Tetromino, window: &Window) { window.clear(); window.print_borders(DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG); for &mino in tetromino.minos().iter() { let color = self.get_tetromino_color(&tetromino.tetromino_type()); window.print_char(((mino.x as usize) * SCALE + 2), (mino.y + 1) as usize, DEFAULT_STYLE, color, DEFAULT_BG, '■'); window.print_char(((mino.x as usize) * SCALE + 3), (mino.y + 1) as usize, DEFAULT_STYLE, color, DEFAULT_BG, '■'); } } /// Prints the player's score pub fn print_score(&self, score: usize) { self.score.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", 
score)); } /// Prints the difficulty level pub fn print_level(&self, level: usize) { self.level.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", level)); } /// Prints the number of lines cleared pub fn print_lines(&self, lines: usize) { self.lines.print(0, 0, DEFAULT_STYLE, DEFAULT_FG, DEFAULT_BG, &format!("{:}", lines)); } }<|fim▁end|>
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals import os import os.path from freight.constants import PROJECT_ROOT from freight.exceptions import CommandError class UnknownRevision(CommandError): pass class Vcs(object): ssh_connect_path = os.path.join(PROJECT_ROOT, 'bin', 'ssh-connect') def __init__(self, workspace, url, username=None): self.url = url self.username = username self.workspace = workspace self._path_exists = None @property def path(self): return self.workspace.path def get_default_env(self): return {} def run(self, command, capture=False, workspace=None, *args, **kwargs): if workspace is None: workspace = self.workspace if not self.exists(workspace=workspace): kwargs.setdefault('cwd', None) env = kwargs.pop('env', {}) for key, value in self.get_default_env().iteritems(): env.setdefault(key, value) env.setdefault('FREIGHT_SSH_REPO', self.url) kwargs['env'] = env if capture: handler = workspace.capture else: handler = workspace.run rv = handler(command, *args, **kwargs) if isinstance(rv, basestring):<|fim▁hole|> return rv def exists(self, workspace=None): if workspace is None: workspace = self.workspace return os.path.exists(workspace.path) def clone_or_update(self): if self.exists(): self.update() else: self.clone() def clone(self): raise NotImplementedError def update(self): raise NotImplementedError def checkout(self, ref): raise NotImplementedError def describe(self, ref): """ Given a `ref` return the fully qualified version. """ raise NotImplementedError def get_default_revision(self): raise NotImplementedError<|fim▁end|>
return rv.strip()
<|file_name|>compat.py<|end_file_name|><|fim▁begin|>try: from django.utils.encoding import force_text # noqa except ImportError: from django.utils.encoding import force_unicode as force_text # noqa <|fim▁hole|>try: from urllib2 import urlopen # noqa except ImportError: from urllib.request import urlopen # noqa<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! GameCube archive and compression format. //pub mod compress; pub mod decompress; #[derive(Clone, Debug)] pub struct GslHeader { pub name: String, pub offset: u32, pub size: u32 } #[derive(Clone, Debug)] pub struct GslFile { pub name: String, pub data: Vec<u8> } pub use self::decompress::decompress_le;<|fim▁hole|>pub use self::decompress::decompress_be; pub use self::decompress::decompress_guess;<|fim▁end|>
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin from .models import Lesson, Course, CourseLead, QA # from django.utils.translation import ugettext_lazy as _ from ordered_model.admin import OrderedModelAdmin from core.models import User # from adminfilters.models import Species, Breed class UserAdminInline(admin.TabularInline): model = User @admin.register(Lesson) class LessonAdmin(admin.ModelAdmin): ordering = ['-start'] list_filter = ('student', ) list_display = ('start', 'student') save_as = True # raw_id_fields = ("student",) # inlines = [UserAdminInline] @admin.register(Course) class CourseAdmin(admin.ModelAdmin): list_display = ('name', 'slug', 'published', ) ordering = ['id'] @admin.register(CourseLead) class CourseLeadAdmin(admin.ModelAdmin): list_display = ( 'name', 'contact', 'course', 'status',<|fim▁hole|> list_filter = ('status', ) ordering = ['status'] @admin.register(QA) class QAAdmin(OrderedModelAdmin): list_display = ( 'order', 'question', 'move_up_down_links', ) # list_filter = ('status', ) list_display_links = ('question', ) ordering = ['order']<|fim▁end|>
'student', )
<|file_name|>TouchZoom.js<|end_file_name|><|fim▁begin|>/* * L.Handler.TouchZoom is used internally by L.Map to add touch-zooming on Webkit-powered mobile browsers. */ L.Handler.TouchZoom = L.Handler.extend({ enable: function() { if (!L.Browser.touch || this._enabled) { return; } L.DomEvent.addListener(this._map._container, 'touchstart', this._onTouchStart, this); this._enabled = true; }, disable: function() { if (!this._enabled) { return; } L.DomEvent.removeListener(this._map._container, 'touchstart', this._onTouchStart, this); this._enabled = false; }, _onTouchStart: function(e) { if (!e.touches || e.touches.length != 2 || this._map._animatingZoom) { return; } var p1 = this._map.mouseEventToLayerPoint(e.touches[0]), p2 = this._map.mouseEventToLayerPoint(e.touches[1]), viewCenter = this._map.containerPointToLayerPoint(this._map.getSize().divideBy(2)); this._startCenter = p1.add(p2).divideBy(2, true); this._startDist = p1.distanceTo(p2); //this._startTransform = this._map._mapPane.style.webkitTransform; this._moved = false; this._zooming = true; this._centerOffset = viewCenter.subtract(this._startCenter); L.DomEvent.addListener(document, 'touchmove', this._onTouchMove, this); L.DomEvent.addListener(document, 'touchend', this._onTouchEnd, this); L.DomEvent.preventDefault(e); }, _onTouchMove: function(e) { if (!e.touches || e.touches.length != 2) { return; } if (!this._moved) { this._map._mapPane.className += ' leaflet-zoom-anim'; this._map._prepareTileBg(); this._moved = true; } var p1 = this._map.mouseEventToLayerPoint(e.touches[0]), p2 = this._map.mouseEventToLayerPoint(e.touches[1]); this._scale = p1.distanceTo(p2) / this._startDist; this._delta = p1.add(p2).divideBy(2, true).subtract(this._startCenter); /* * Used 2 translates instead of transform-origin because of a very strange bug - * it didn't count the origin on the first touch-zoom but worked correctly afterwards */ this._map._tileBg.style.webkitTransform = [ L.DomUtil.getTranslateString(this._delta), 
L.DomUtil.getScaleString(this._scale, this._startCenter) ].join(" "); L.DomEvent.preventDefault(e); }, _onTouchEnd: function(e) { if (!this._moved || !this._zooming) { return; } this._zooming = false; var oldZoom = this._map.getZoom(), floatZoomDelta = Math.log(this._scale)/Math.LN2, roundZoomDelta = (floatZoomDelta > 0 ? Math.ceil(floatZoomDelta) : Math.floor(floatZoomDelta)), zoom = this._map._limitZoom(oldZoom + roundZoomDelta), zoomDelta = zoom - oldZoom, centerOffset = this._centerOffset.subtract(this._delta).divideBy(this._scale), centerPoint = this._map.getPixelOrigin().add(this._startCenter).add(centerOffset), center = this._map.unproject(centerPoint); <|fim▁hole|> var finalScale = Math.pow(2, zoomDelta); this._map._runAnimation(center, zoom, finalScale / this._scale, this._startCenter.add(centerOffset)); } });<|fim▁end|>
L.DomEvent.removeListener(document, 'touchmove', this._onTouchMove); L.DomEvent.removeListener(document, 'touchend', this._onTouchEnd);
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|>""" Copyright: (c) 2012-2014 Artem Nezvigin <[email protected]> License: MIT, see LICENSE for details """ from functools import wraps from flask import g, request, session, render_template, url_for, redirect from faceoff.models.user import find_user def templated(template_name=None): """ Automatically renders a template named after the current endpoint. Will also render the name provided if given. """ def closure(f): @wraps(f) def decorator(*args, **kwargs):<|fim▁hole|> response = f(*args, **kwargs) if response is None: response = {} elif not isinstance(response, dict): return response if template is None: template = '%s.html' % request.endpoint return render_template(template, **response) return decorator return closure def authenticated(f): """ Asserts that an existing logged-in user session is active. If not, redirects to the authenticate gate. """ @wraps(f) def decorator(*args, **kwargs): user_id = session.get('user_id') if user_id is None: return redirect(url_for('gate')) user = find_user(id=user_id) if user is None: return redirect(url_for('gate')) g.current_user = user return f(*args, **kwargs) return decorator<|fim▁end|>
template = template_name
<|file_name|>simpleamt.py<|end_file_name|><|fim▁begin|>import argparse, json import boto3 from boto.mturk.connection import MTurkConnection from boto.mturk.qualification import * from jinja2 import Environment, FileSystemLoader """ A bunch of free functions that we use in all scripts. """ def get_jinja_env(config): """ Get a jinja2 Environment object that we can use to find templates. """ return Environment(loader=FileSystemLoader(config['template_directories'])) def json_file(filename): with open(filename, 'r') as f: return json.load(f) def get_parent_parser(): """ Get an argparse parser with arguments that are always needed """ parser = argparse.ArgumentParser(add_help=False) parser.add_argument('--prod', action='store_false', dest='sandbox', default=True, help="Whether to run on the production AMT site.") parser.add_argument('--hit_ids_file')<|fim▁hole|> def get_mturk_connection_from_args(args): """ Utility method to get an MTurkConnection from argparse args. """ aws_access_key = args.config.get('aws_access_key') aws_secret_key = args.config.get('aws_secret_key') return get_mturk_connection(sandbox=args.sandbox, aws_access_key=aws_access_key, aws_secret_key=aws_secret_key) def get_mturk_connection(sandbox=True, aws_access_key=None, aws_secret_key=None): """ Get a boto mturk connection. This is a thin wrapper over the MTurkConnection constructor; the only difference is a boolean flag to indicate sandbox or not. """ kwargs = {} if aws_access_key is not None: kwargs['aws_access_key_id'] = aws_access_key if aws_secret_key is not None: kwargs['aws_secret_access_key'] = aws_secret_key if sandbox: host = 'mechanicalturk.sandbox.amazonaws.com' else: host='mechanicalturk.amazonaws.com' return MTurkConnection(host=host, **kwargs) def setup_qualifications(hit_properties): """ Replace some of the human-readable keys from the raw HIT properties JSON data structure with boto-specific objects. 
""" qual = Qualifications() if 'country' in hit_properties: qual.add(LocaleRequirement('In', hit_properties['country'])) del hit_properties['country'] if 'hits_approved' in hit_properties: qual.add(NumberHitsApprovedRequirement('GreaterThan', hit_properties['hits_approved'])) del hit_properties['hits_approved'] if 'percent_approved' in hit_properties: qual.add(PercentAssignmentsApprovedRequirement('GreaterThan', hit_properties['percent_approved'])) del hit_properties['percent_approved'] # qual.add(Requirement(qualification_type_id="3TDQPWMDS877YXAXCWP6LHT0FJRANT",comparator='GreaterThan',integer_value=9)) # 3TDQPWMDS877YXAXCWP6LHT0FJRANT hit_properties['qualifications'] = qual<|fim▁end|>
parser.add_argument('--config', default='config.json', type=json_file) return parser
<|file_name|>matchers.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 Mirantis Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package ginkgoext<|fim▁hole|> type anythingMatcher struct{} func (matcher *anythingMatcher) Match(actual interface{}) (success bool, err error) { return true, nil } func (matcher *anythingMatcher) FailureMessage(actual interface{}) (message string) { return "" } func (matcher *anythingMatcher) NegatedFailureMessage(actual interface{}) (message string) { return "" } // BeAnything returns matcher that matches any value func BeAnything() types.GomegaMatcher { return &anythingMatcher{} }<|fim▁end|>
import ( "github.com/onsi/gomega/types" )
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at<|fim▁hole|>// http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn main() {}<|fim▁end|>
<|file_name|>StartTote.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Debrief - the Open Source Maritime Analysis Application * http://debrief.info * * (C) 2000-2020, Deep Blue C Technology Ltd * * This library is free software; you can redistribute it and/or * modify it under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html) <|fim▁hole|> * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *******************************************************************************/ package Debrief.Tools.Tote; import MWC.GUI.PlainChart; import MWC.GUI.ToolParent; import MWC.GUI.Tools.Action; import MWC.GUI.Tools.PlainTool; public final class StartTote extends PlainTool { /** * */ private static final long serialVersionUID = 1L; ///////////////////////////////////////////////////////////// // member variables //////////////////////////////////////////////////////////// private final PlainChart _theChart; ///////////////////////////////////////////////////////////// // constructor //////////////////////////////////////////////////////////// public StartTote(final ToolParent theParent, final PlainChart theChart) { super(theParent, "Step Forward", null); _theChart = theChart; } @Override public final void execute() { _theChart.update(); } ///////////////////////////////////////////////////////////// // member functions //////////////////////////////////////////////////////////// @Override public final Action getData() { // return the product return null; } }<|fim▁end|>
*
<|file_name|>test_scale_in.py<|end_file_name|><|fim▁begin|>######## # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # * See the License for the specific language governing permissions and # * limitations under the License. from . import TestScaleBase class TestScaleCompute(TestScaleBase): def test_compute_scale_in_compute(self): expectations = self.deploy_app('scale4') expectations['compute']['new']['install'] = 3 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute', 'delta': -1}) expectations['compute']['existing']['install'] = 2 expectations['compute']['removed']['install'] = 1 expectations['compute']['removed']['uninstall'] = 1 self.deployment_assertions(expectations) def test_compute_scale_in_compute_ignore_failure_true(self): expectations = self.deploy_app('scale_ignore_failure') expectations['compute']['new']['install'] = 3 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute', 'ignore_failure': True, 'delta': -1}) expectations['compute']['existing']['install'] = 2 expectations['compute']['removed']['install'] = 1 expectations['compute']['removed']['uninstall'] = 1 self.deployment_assertions(expectations) def test_compute_scale_in_compute_ignore_failure_false(self): expectations = self.deploy_app('scale_ignore_failure') expectations['compute']['new']['install'] = 3 self.deployment_assertions(expectations) try: self.scale(parameters={ 'scalable_entity_name': 'compute', 'ignore_failure': 
False, 'delta': -1}) except RuntimeError as e: self.assertIn( "RuntimeError: Workflow failed: Task failed " "'testmockoperations.tasks.mock_stop_failure'", str(e)) else: self.fail() def test_compute_scale_out_and_in_compute_from_0(self): expectations = self.deploy_app('scale10') expectations['compute']['new']['install'] = 0 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute'}) expectations['compute']['new']['install'] = 1 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute', 'delta': -1}) expectations['compute']['new']['install'] = 0 expectations['compute']['existing']['install'] = 0 expectations['compute']['removed']['install'] = 1 expectations['compute']['removed']['uninstall'] = 1 self.deployment_assertions(expectations) def test_compute_scale_in_2_compute(self): expectations = self.deploy_app('scale4') expectations['compute']['new']['install'] = 3 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute', 'delta': -2}) expectations['compute']['existing']['install'] = 1 expectations['compute']['removed']['install'] = 2 expectations['compute']['removed']['uninstall'] = 2 self.deployment_assertions(expectations) def test_db_contained_in_compute_scale_in_compute(self): expectations = self.deploy_app('scale5') expectations['compute']['new']['install'] = 2 expectations['db']['new']['install'] = 4 expectations['db']['new']['rel_install'] = 8 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute', 'delta': -1}) expectations['compute']['existing']['install'] = 1 expectations['compute']['removed']['install'] = 1 expectations['compute']['removed']['uninstall'] = 1 expectations['db']['existing']['install'] = 2 expectations['db']['existing']['rel_install'] = 4 expectations['db']['removed']['install'] = 2 
expectations['db']['removed']['uninstall'] = 2 expectations['db']['removed']['rel_install'] = 4 expectations['db']['removed']['rel_uninstall'] = 4 self.deployment_assertions(expectations) def test_db_connected_to_compute_scale_in_db(self): expectations = self.deploy_app('scale6') expectations['compute']['new']['install'] = 2 expectations['db']['new']['install'] = 2 expectations['db']['new']['rel_install'] = 8 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'db', 'delta': -1}) expectations['compute']['existing']['install'] = 2 expectations['db']['existing']['install'] = 1 expectations['db']['existing']['rel_install'] = 4 expectations['db']['removed']['install'] = 1 expectations['db']['removed']['uninstall'] = 1 expectations['db']['removed']['rel_install'] = 4 expectations['db']['removed']['rel_uninstall'] = 4 self.deployment_assertions(expectations) def test_db_connected_to_compute_scale_in_compute(self): expectations = self.deploy_app('scale6') expectations['compute']['new']['install'] = 2 expectations['db']['new']['install'] = 2 expectations['db']['new']['rel_install'] = 8 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute', 'delta': -1}) expectations['compute']['existing']['install'] = 1 expectations['compute']['removed']['install'] = 1 expectations['compute']['removed']['uninstall'] = 1 expectations['db']['existing']['install'] = 2 expectations['db']['existing']['rel_install'] = 8 expectations['db']['existing']['rel_uninstall'] = 4 self.deployment_assertions(expectations) def test_db_connected_to_compute_scale_in_and_out_compute_from_0(self): expectations = self.deploy_app('scale11') expectations['compute']['new']['install'] = 0 expectations['db']['new']['install'] = 1 expectations['db']['new']['rel_install'] = 0 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute', 'delta': 1}) 
expectations['compute']['new']['install'] = 1 expectations['compute']['existing']['install'] = 0 expectations['db']['existing']['install'] = 1 expectations['db']['existing']['rel_install'] = 0 expectations['db']['existing']['scale_rel_install'] = 2 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'compute', 'delta': -1}) expectations['compute']['new']['install'] = 0 expectations['compute']['existing']['install'] = 0 expectations['compute']['removed']['install'] = 1 expectations['compute']['removed']['uninstall'] = 1 expectations['db']['existing']['install'] = 1<|fim▁hole|> def test_db_contained_in_compute_scale_in_db_scale_db(self): expectations = self.deploy_app('scale5') expectations['compute']['new']['install'] = 2 expectations['db']['new']['install'] = 4 expectations['db']['new']['rel_install'] = 8 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'db', 'delta': -1, 'scale_compute': False}) expectations['compute']['existing']['install'] = 2 expectations['db']['existing']['install'] = 2 expectations['db']['existing']['rel_install'] = 4 expectations['db']['removed']['install'] = 2 expectations['db']['removed']['uninstall'] = 2 expectations['db']['removed']['rel_install'] = 4 expectations['db']['removed']['rel_uninstall'] = 4 self.deployment_assertions(expectations) def test_db_contained_in_compute_scale_in_db(self): expectations = self.deploy_app('scale5') expectations['compute']['new']['install'] = 2 expectations['db']['new']['install'] = 4 expectations['db']['new']['rel_install'] = 8 self.deployment_assertions(expectations) expectations = self.scale(parameters={ 'scalable_entity_name': 'db', 'delta': -1, 'scale_compute': True}) expectations['compute']['existing']['install'] = 1 expectations['compute']['removed']['install'] = 1 expectations['compute']['removed']['uninstall'] = 1 expectations['db']['existing']['install'] = 2 
expectations['db']['existing']['rel_install'] = 4 expectations['db']['removed']['install'] = 2 expectations['db']['removed']['uninstall'] = 2 expectations['db']['removed']['rel_install'] = 4 expectations['db']['removed']['rel_uninstall'] = 4 self.deployment_assertions(expectations)<|fim▁end|>
expectations['db']['existing']['scale_rel_install'] = 2 expectations['db']['existing']['rel_uninstall'] = 2 self.deployment_assertions(expectations)
<|file_name|>send-to-messenger.ts<|end_file_name|><|fim▁begin|>import { post } from 'src/lib/http'; import { log, warn } from 'src/lib/print'; import { messengerAppId } from 'src/store'; import { SendToMessengerEvent } from 'typings/facebook'; import { addClass, setAttributes } from 'src/lib/dom'; import { shallowCopy } from 'src/lib/object'; import { isFunc, isObject } from 'src/lib/assert'; import { WidgetType } from '../helper'; import { BaseWidget, WidgetDataCommon } from './base'; import uuid from 'uuid'; /** * 引用编译元数据 * - 因为点击之后会自动将此处信息发送至 facebook * - 所以这里也能理解为是向 facebook 发送的数据接口 */ interface RefData { /** 数据编号 */ id?: string; /** 数据类型 */ type?: 'feed' | 'receipt'; /** 插件事件标记 */ gateway: 'engagement'; /** 插件编号 */ code: string; } /** 附带的元数据 */ interface MessageMeta { /** 数据类型 */ type: 'feed' | 'receipt'; /** 完整数据 */ data: AnyObject; } /** Send to Messenger 事件名称 */ enum EventName { click = 'click', login = 'login', notYou = 'notYou', rendered = 'rendered', } /** 发送给 bothub 的完整数据 */ type BothubMessage = Required<RefData> & MessageMeta & { page_id: string; }; /** “发送至 Messenger”插件 */ export interface SendToMessengerData extends WidgetDataCommon { /** “发送至 Messenger”插件类型 */ type: WidgetType.SendToMessenger; /** * 主题颜色 * - 默认为`blue` */ color?: 'blue' | 'white'; /** * 插件大小 * - 默认为`large` */ size?: 'standard' | 'large' | 'xlarge'; /** * 如果为 true,则点击该按钮时, * 已登录用户必须重新登录, * 默认为`false`<|fim▁hole|> */ enforceLogin?: boolean; /** * 按钮文本 * - 默认为空 */ ctaText?: 'GET_THIS_IN_MESSENGER' | 'RECEIVE_THIS_IN_MESSENGER' | 'SEND_THIS_TO_ME' | 'GET_CUSTOMER_ASSISTANCE' | 'GET_CUSTOMER_SERVICE' | 'GET_SUPPORT' | 'LET_US_CHAT' | 'SEND_ME_MESSAGES' | 'ALERT_ME_IN_MESSENGER' | 'SEND_ME_UPDATES' | 'MESSAGE_ME' | 'LET_ME_KNOW' | 'KEEP_ME_UPDATED' | 'TELL_ME_MORE' | 'SUBSCRIBE_IN_MESSENGER' | 'SUBSCRIBE_TO_UPDATES' | 'GET_MESSAGES' | 'SUBSCRIBE' | 'GET_STARTED_IN_MESSENGER' | 'LEARN_MORE_IN_MESSENGER' | 'GET_STARTED'; /** 附带的数据 */ message?: MessageMeta | (() => MessageMeta); /** 点击事件 */ 
[EventName.click]?(): void; /** 登录完成事件 */ [EventName.login]?(): void; /** 更换当前登录账号事件 */ [EventName.notYou]?(): void; /** 渲染完成事件 */ [EventName.rendered]?(): void; } /** facebook “发送至 Messenger”插件属性 */ export type FbSendToMessengerAttrs = Pick<SendToMessengerData, 'color' | 'size' | 'enforceLogin' | 'ctaText' | 'pageId'>; const fbClass = 'fb-send-to-messenger'; const bhClass = 'bothub-send-to-messenger'; /** * [“发送至 Messenger”插件](https://developers.facebook.com/docs/messenger-platform/discovery/send-to-messenger-plugin/) */ export default class SendToMessenger extends BaseWidget<SendToMessengerData> { fbAttrs!: FbSendToMessengerAttrs; /** 是否已经发送数据 */ sent = false; /** 每次事件生成的唯一编号 */ message?: BothubMessage; constructor(data: SendToMessengerData) { super(data); this.init(); this.check(); } /** 引用编译 */ get ref() { const { code, message } = this; const data: RefData = { code, gateway: 'engagement', }; if (message) { data.id = message.id; data.type = message.type; } return `base64:${window.btoa(JSON.stringify(data))}`; } init() { const { origin } = this; this.message = this.getMessage(); this.fbAttrs = shallowCopy(origin, ['color', 'size', 'enforceLogin', 'ctaText', 'pageId']); this.off(); this.on(EventName.click, origin[EventName.click]); this.on(EventName.login, origin[EventName.login]); this.on(EventName.notYou, origin[EventName.notYou]); this.on(EventName.rendered, origin[EventName.rendered]); // 发送消息之后,状态位赋值 this.on('click', () => this.sent = true); // 如果包含有信息,则渲染完成之后发送完整信息 this.on('rendered', () => { const { message } = this; if (message) { post('tr/', message).then(() => this.sent = true); } }); } parse(focus = false) { if ((!focus && this.isRendered) || !this.canRender || !this.$el) { log(`Skip ${this.name} with id ${this.id}`); return; } /** 是否是重复渲染 */ const alreadyRender = this.isRendered; const dom = this.$el.firstElementChild!; this.sent = false; this.isRendered = false; addClass(dom, fbClass); addClass(dom, bhClass); setAttributes(dom, this.fbAttrs); 
dom.setAttribute('data-ref', this.ref); dom.setAttribute('messenger_app_id', messengerAppId); window.FB.XFBML.parse(this.$el); // 绑定事件 if (!alreadyRender) { window.FB.Event.subscribe('send_to_messenger', (ev: SendToMessengerEvent) => { if (!ev.ref) { warn(`Can not found 'ref' attrubite in this '${this.name}' Plugin`, true); return; } if (ev.ref !== this.ref) { return; } if (ev.event === 'rendered' && !this.isRendered) { log(`${this.name} Plugin with ID ${this.id} has been rendered`); this.isRendered = true; this.emit(EventName.rendered); } else if (ev.event === 'clicked') { this.emit(EventName.click); } else if (ev.event === 'not_you') { this.emit(EventName.notYou); } else if (ev.event === 'opt_in') { this.emit(EventName.login); } }); } } /** 当前插件附带的数据转换为标准格式 */ getMessage() { const { message, pageId } = this.origin; if (!message) { return; } let data: MessageMeta; if (isFunc(message)) { data = message(); if (!data) { return; } } else if (isObject(message)) { data = message; } else { return; } return { ...data, code: this.code, page_id: pageId as string, gateway: 'engagement' as const, id: uuid(), }; } }<|fim▁end|>
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|>urlpatterns = patterns('links.views', url(r'^link/settings/$', views.settings, name = 'settings'), url(r'^link/donate/(?P<url>[\d\w.]+)$', views.kintera_redirect, name = 'donate'), url(r'^link/rider/(?P<url>[\d\w.]+)$', views.t4k_redirect, name = 'profile'), )<|fim▁end|>
from django.conf.urls import patterns, url from links import views
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use cmd::{REPLY, UNKNOWN}; use cmd; use con::{Peer, Connection}; use server::{Server}; use super::{RawMessage}; mod registration; mod msg; mod join; mod part; mod mode;<|fim▁hole|>mod ping_pong; mod cap; macro_rules! handle { {$( $command:ident with $handler:path; )*} => { /// Temporary dispatcher pub fn get_handler(message: RawMessage) -> Result<Box<MessageHandler + Send>, Option<RawMessage>> { match message.command() { $(cmd::$command => { let t: Result<Box<$handler>, Option<RawMessage>> = MessageHandler::from_message(message); t.map(|v| v as Box<MessageHandler + Send>) },)* REPLY(_) => { let t: Result<Box<Reply>, Option<RawMessage>> = MessageHandler::from_message(message); t.map(|v| v as Box<MessageHandler + Send>) }, UNKNOWN(_) => { let t: Result<Box<ExtensionHandler>, Option<RawMessage>> = MessageHandler::from_message(message); t.map(|v| v as Box<MessageHandler + Send>) } } } }} handle!{ PRIVMSG with self::msg::Msg; NOTICE with self::msg::Msg; NAMES with self::lists::Names; WHO with self::lists::Who; MODE with self::mode::Mode; JOIN with self::join::Join; TOPIC with self::simple::Topic; PART with self::part::Part; QUIT with self::part::Quit; NICK with self::registration::Nick; USER with self::registration::User; PING with self::ping_pong::Ping; PONG with self::ping_pong::Pong; CAP with self::cap::Cap; } ///// Temporary dispatcher //pub fn get_handler(message: RawMessage) -> Result<Box<MessageHandler + Send>, RawMessage> { // let t: Result<Box<self::join::JoinHandler>, RawMessage> = MessageHandler::from_message(message); // t.map(|v| v as Box<MessageHandler + Send>) //} /// Trait for the various message handlers /// /// The general template for the implementation of new messages is: /// /// ```no_run /// pub struct Handler { /// raw: RawMessage, /// } /// impl Handler { /// fn handle_XX() { /// } /// } /// impl super::MessageHandler for Handler { /// fn from_message(message: RawMessage) -> Result<Box<Handler>, 
Option<RawMessage>> { /// } /// fn invoke(&self, server: &mut Server, origin: SharedClient) { /// } /// fn raw_message(&self) -> &RawMessage { /// &self.raw /// } /// } /// ``` pub trait MessageHandler { /// Tries to parse the raw message. /// /// Returns the handler for the message or an error message /// if something goes wrong fn from_message(message: RawMessage) -> Result<Box<Self>, Option<RawMessage>>; /// Invokes the message handler. /// /// Since this usually happens on the main event loop, /// the method should avoid time-consuming operations such that the main thread /// is not blocked for an extended time period. fn invoke(&self, server: &mut Server, origin: Peer); /// Invoke the handler for a connection. /// /// This only happens if the client is not registered. The default implementation /// does nothing. Overwrite to influence the registration process. fn invoke_con(&self, _: &mut Server, _: Connection) {} /// Returns the raw message the handler is bases on fn raw_message(&self) -> &RawMessage; } /// Handles (ignores) reply codes from clients struct Reply { raw: RawMessage, } impl MessageHandler for Reply { fn from_message(message: RawMessage) -> Result<Box<Reply>, Option<RawMessage>> { Ok(box Reply { raw: message }) } fn invoke(&self, _: &mut Server, _: Peer) { // Ingore reply codes from clients they are not allowed to send any } fn raw_message(&self) -> &RawMessage { &self.raw } } /// Handles unknown messages. Could be used as an entry point for plugins pub struct ExtensionHandler { raw: RawMessage, } impl MessageHandler for ExtensionHandler { fn from_message(message: RawMessage) -> Result<Box<ExtensionHandler>, Option<RawMessage>> { Ok(box ExtensionHandler { raw: message }) } fn invoke(&self, _: &mut Server, _: Peer) { error!("Handling of message {} not implemented yet", self.raw.command().to_string()) } fn raw_message(&self) -> &RawMessage { &self.raw } }<|fim▁end|>
mod lists; mod simple;
<|file_name|>check_with_sitemap_vpro.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import os import re import subprocess import sys import threading import time import urllib from subprocess import Popen, PIPE sys.path.append("..") from check_with_sitemap import CheckWithSitemap DEFAULT_JAVA_PATH = 'java' class CheckWithSiteMapVpro(CheckWithSitemap): """ This specialization is customized for VPRO. It can connect via JMX to VPRO's Mangolia CMS which contains the original pages, and request it to index missing pages This wraps a command line client for jmx: https://github.com/jiaqi/jmxterm/ """ def __init__(self, java_path: str = DEFAULT_JAVA_PATH): super().__init__() self.jmx_url = self.args.jmx_url self.jmxterm_binary = self.args.jmxterm_binary self.java_path = java_path self._get_jmx_term_if_necessary() if self.args.tunnel: tunnel = SshTunnel(self.log) tunnel.start() def add_arguments(self): super().add_arguments() api = self.api api.add_argument('--jmx_url', type=str, default=None, help='use JMX to trigger reindex. 
An url like "localhost:500" where this is tunneled to the magnolia backend server')<|fim▁hole|> api.add_argument('--jmxterm_binary', type=str, default=None, help='location of jmxterm binary') api.add_argument('--tunnel', action='store_true', default=False, help='set up jmx tunnel too') def perform_add_to_api(self, not_in_api: list): """ Actually add to api """ if self.jmx_url: self.jmxterm = [self.java_path, '-jar', self.jmxterm_binary, '--url', self.jmx_url, "-n", "-v", "silent"] not_in_api = self._reindex_3voor12(not_in_api) not_in_api = self._reindex_cinema_films(not_in_api) not_in_api = self._reindex_cinema_person(not_in_api) not_in_api = self._reindex_mids(not_in_api) self._reindex_urls(not_in_api) else: self.log.info("No jmx_url configured, not trying to implicitly add to api via JMX") def _reindex_mids(self, not_in_api: list) -> list: urls_with_mid = list(filter(lambda m: m[0] is not None, map(self._find_mid, not_in_api))) return self._reindex_ids(not_in_api, urls_with_mid, "nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexMediaObjects", 100, "media objects") def _reindex_3voor12(self, not_in_api: list) -> list: urls_with_uuids = list(filter(lambda m: m[0] is not None, map(self._find_update_uuid, not_in_api))) return self._reindex_ids(not_in_api, urls_with_uuids, "nl.vpro.magnolia:name=DrieVoorTwaalfUpdateIndexer", "reindexUUIDs", 100, "3voor12 updates") def _reindex_cinema_films(self, not_in_api: list) -> list: cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_film_id, not_in_api))) return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaObjectIndexer", "reindex", 100, "cinema films") def _reindex_cinema_person(self, not_in_api: list) -> list: cinema_ids = list(filter(lambda m: m[0] is not None, map(self._find_cinema_person_uid, not_in_api))) return self._reindex_ids(not_in_api, cinema_ids, "nl.vpro.magnolia:name=CinemaPersonIndexer", "reindex", 100, "cinema persons") def _reindex_urls(self, not_in_api: 
list) -> None: page_size = 20 self.log.info("Reindexing %d urls" % len(not_in_api)) for i in range(0, len(not_in_api), page_size ): self._call_jmx_operation("nl.vpro.magnolia:name=IndexerMaintainerImpl", "reindexUrls", not_in_api[i: i + page_size ]) def _find_mid(self, url: str) -> list: return self._find_by_regexp(".*?~(.*?)~.*", url) def _find_update_uuid(self, url: str) -> list: return self._find_by_regexp(".*?update~(.*?)~.*", url) def _find_cinema_film_id(self, url: str) -> list: return self._find_by_regexp(".*?film~(.*?)~.*", url) def _find_cinema_person_uid(self, url: str) -> list: return self._find_by_regexp(".*?persoon~(.*?)~.*", url) @staticmethod def _find_by_regexp(regex: str, url: str) -> list: matcher = re.match(regex, url) if matcher: return [matcher.group(1), url] else: return [None, url] def _reindex_ids( self, not_in_api: list, ids: list, bean: str, operation: str, page_size: int, name: str) -> list: self.log.info("Reindexing %d %s" % (len(ids), name)) for i in range(0, len(ids), page_size): self._call_jmx_operation(bean, operation, list(map(lambda m : m[0], ids[i: i + page_size]))) urls = list(map(lambda u: u[1], ids)) self.log.debug("Associated with %s" % str(urls)) return [e for e in not_in_api if e not in urls] def _call_jmx_operation(self, bean: str, operation: str, sub_list: list): p = Popen(self.jmxterm, stdin=PIPE, stdout=PIPE, encoding='utf-8') input = "bean " + bean +"\nrun " + operation + " " + ",".join(sub_list) self.log.info("input\n%s" % input) out, error = p.communicate(input=input, timeout=100) self.log.info("output\n%s" % out) if error: self.log.info("error\n%s" % error) if "still busy" in out: self.log.info("Jmx reports that still busy. 
Let's wait a bit then") time.sleep(20) def _get_jmx_term_if_necessary(self): if self.jmx_url and not self.jmxterm_binary: from_env = os.getenv('JMXTERM_BINARY') if not from_env is None: self.jmxterm_binary=from_env else: jmxtermversion = "1.0.2" jmxterm = "jmxterm-" + jmxtermversion + "-uber.jar" path = os.path.dirname(os.path.realpath(__file__)) self.jmxterm_binary = os.path.join(path, jmxterm) if not os.path.exists(self.jmxterm_binary): get_url = "https://github.com/jiaqi/jmxterm/releases/download/v" + jmxtermversion + "/" + jmxterm self.log.info("Downloading %s -> %s" % (get_url, self.jmxterm_binary)) urllib.request.urlretrieve (get_url, self.jmxterm_binary) class SshTunnel(threading.Thread): def __init__(self, log): threading.Thread.__init__(self) self.daemon = True # So that thread will exit when # main non-daemon thread finishes self.log = log def run(self): self.log.info("Setting up tunnel") if subprocess.call([ 'ssh', '-N', '-4', '-L', '5000:localhost:5000', 'os2-magnolia-backend-prod-01' ]): raise Exception ('ssh tunnel setup failed') if __name__ == "__main__": CheckWithSiteMapVpro().main()<|fim▁end|>
<|file_name|>HITLayoutParameterJsonUnmarshaller.java<|end_file_name|><|fim▁begin|>/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.mturk.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.mturk.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import com.fasterxml.jackson.core.JsonToken; import static com.fasterxml.jackson.core.JsonToken.*; /** * HITLayoutParameter JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class HITLayoutParameterJsonUnmarshaller implements Unmarshaller<HITLayoutParameter, JsonUnmarshallerContext> { public HITLayoutParameter unmarshall(JsonUnmarshallerContext context) throws Exception { HITLayoutParameter hITLayoutParameter = new HITLayoutParameter(); int originalDepth = context.getCurrentDepth(); String currentParentElement = context.getCurrentParentElement(); int targetDepth = originalDepth + 1; JsonToken token = context.getCurrentToken(); if (token == null) token = context.nextToken(); if (token == VALUE_NULL) { return null; } while (true) { if (token == null)<|fim▁hole|> if (token == FIELD_NAME || token == START_OBJECT) { if (context.testExpression("Name", targetDepth)) { context.nextToken(); hITLayoutParameter.setName(context.getUnmarshaller(String.class).unmarshall(context)); } if (context.testExpression("Value", targetDepth)) { context.nextToken(); 
hITLayoutParameter.setValue(context.getUnmarshaller(String.class).unmarshall(context)); } } else if (token == END_ARRAY || token == END_OBJECT) { if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) { if (context.getCurrentDepth() <= originalDepth) break; } } token = context.nextToken(); } return hITLayoutParameter; } private static HITLayoutParameterJsonUnmarshaller instance; public static HITLayoutParameterJsonUnmarshaller getInstance() { if (instance == null) instance = new HITLayoutParameterJsonUnmarshaller(); return instance; } }<|fim▁end|>
break;
<|file_name|>0010_auto__add_field_poll_detailed_chart.py<|end_file_name|><|fim▁begin|># encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Poll.detailed_chart' db.add_column('polls_poll', 'detailed_chart', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False) def backwards(self, orm): # Deleting field 'Poll.detailed_chart' db.delete_column('polls_poll', 'detailed_chart') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),<|fim▁hole|> 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], 
{'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'polls.poll': { 'Meta': {'ordering': "('-id',)", 'object_name': 'Poll'}, 'always_update': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'category_set': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'category_set'", 'null': 'True', 'to': "orm['polls.PollCategorySet']"}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'demographic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'description': ('django.db.models.fields.TextField', [], {}), 'detailed_chart': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'ended': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], 
{'max_length': '128'}), 'secondary_category_set': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'secondary_category_set'", 'null': 'True', 'to': "orm['polls.PollCategorySet']"}), 'secondary_template': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'secondary_template'", 'null': 'True', 'to': "orm['polls.PollCategorySet']"}), 'started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'template': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'template'", 'null': 'True', 'to': "orm['polls.PollCategorySet']"}), 'unknown_message': ('django.db.models.fields.CharField', [], {'max_length': '160'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'polls.pollcategory': { 'Meta': {'unique_together': "(('name', 'category_set'),)", 'object_name': 'PollCategory'}, 'category_set': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'categories'", 'to': "orm['polls.PollCategorySet']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'latitude': ('django.db.models.fields.CharField', [], {'max_length': '80'}), 'longitude': ('django.db.models.fields.CharField', [], {'max_length': '80'}), 'message': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}) }, 'polls.pollcategoryset': { 'Meta': {'object_name': 'PollCategorySet'}, 'description': ('django.db.models.fields.TextField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}), 'poll': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['polls.Poll']", 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'polls.pollkeyword': { 'Meta': 
{'object_name': 'PollKeyword'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'poll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'keywords'", 'to': "orm['polls.Poll']"}) }, 'polls.pollresponse': { 'Meta': {'ordering': "('-id',)", 'object_name': 'PollResponse'}, 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_responses'", 'null': 'True', 'to': "orm['polls.PollCategory']"}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rapidsms_httprouter.Message']"}), 'poll': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'responses'", 'to': "orm['polls.Poll']"}), 'respondent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'responses'", 'to': "orm['polls.Respondent']"}), 'secondary_category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'secondary_responses'", 'null': 'True', 'to': "orm['polls.PollCategory']"}), 'text': ('django.db.models.fields.CharField', [], {'max_length': '160'}) }, 'polls.pollrule': { 'Meta': {'ordering': "('order', '-category')", 'object_name': 'PollRule'}, 'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'rules'", 'to': "orm['polls.PollCategory']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'lower_bound': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'match': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'numeric': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'order': ('django.db.models.fields.IntegerField', [], {'default': 
'0'}), 'upper_bound': ('django.db.models.fields.IntegerField', [], {'null': 'True'}) }, 'polls.respondent': { 'Meta': {'object_name': 'Respondent'}, 'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'active_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'identity': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'db_index': 'True'}), 'last_response': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_respondent'", 'null': 'True', 'to': "orm['polls.PollResponse']"}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'notes': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'polls.tracsettings': { 'Meta': {'object_name': 'TracSettings'}, 'duplicate_message': ('django.db.models.fields.CharField', [], {'max_length': '160'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'recruitment_message': ('django.db.models.fields.CharField', [], {'max_length': '60'}), 'trac_off_response': ('django.db.models.fields.CharField', [], {'max_length': '160'}), 'trac_on_response': ('django.db.models.fields.CharField', [], {'max_length': '160'}) }, 'rapidsms.backend': { 'Meta': {'object_name': 'Backend'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}) }, 'rapidsms.connection': { 'Meta': {'object_name': 'Connection'}, 'backend': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rapidsms.Backend']"}), 'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rapidsms.Contact']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'identity': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'rapidsms.contact': { 'Meta': 
{'object_name': 'Contact'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}) }, 'rapidsms_httprouter.message': { 'Meta': {'object_name': 'Message'}, 'connection': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'messages'", 'to': "orm['rapidsms.Connection']"}), 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'direction': ('django.db.models.fields.CharField', [], {'max_length': '1'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'in_response_to': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'responses'", 'null': 'True', 'to': "orm['rapidsms_httprouter.Message']"}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '1'}), 'text': ('django.db.models.fields.TextField', [], {}) } } complete_apps = ['polls']<|fim▁end|>
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
<|file_name|>test_118_appid_table_entry.py<|end_file_name|><|fim▁begin|># Created: 16.03.2011, 2018 rewritten for pytest # Copyright (C) 2011-2019, Manfred Moitzi # License: MIT License import pytest from ezdxf.entities.appid import AppID @pytest.fixture def appid(): return AppID.new(<|fim▁hole|> }, ) def test_name(appid): assert appid.dxf.name == "EZDXF"<|fim▁end|>
"FFFF", dxfattribs={ "name": "EZDXF",
<|file_name|>Set-iterator-order.js<|end_file_name|><|fim▁begin|>// Set iterators produces entries in the order they were inserted. var set = Set(); var i; for (i = 7; i !== 1; i = i * 7 % 1117) set.add(i); assertEq(set.size, 557); i = 7; for (var v of set) { assertEq(v, i); i = i * 7 % 1117; }<|fim▁hole|><|fim▁end|>
assertEq(i, 1);
<|file_name|>IntegrationTests.java<|end_file_name|><|fim▁begin|>package com.example.demo; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.autoconfigure.mongo.embedded.EmbeddedMongoAutoConfiguration; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.web.server.LocalServerPort; import org.springframework.test.web.reactive.server.WebTestClient; @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) @ImportAutoConfiguration(exclude = EmbeddedMongoAutoConfiguration.class) public class IntegrationTests { @LocalServerPort int port; <|fim▁hole|> // @Autowired // WebTestClient client; @BeforeEach public void setup() { this.client = WebTestClient.bindToServer() .baseUrl("http://localhost:" + this.port) .build(); } @Test public void getAllMessagesShouldBeOk() { client.get().uri("/posts").exchange() .expectStatus().isOk(); } }<|fim▁end|>
WebTestClient client;
<|file_name|>run.py<|end_file_name|><|fim▁begin|>import cPickle import gzip import time import os import sys import cPickle as pickle import gc import numpy as np from time import sleep import auc import theano import theano.tensor as T from theano.tensor.signal import downsample from theano.tensor.nnet import conv from theano.ifelse import ifelse import theano.printing from collections import OrderedDict from logisticRegression import LogisticRegression from layers import DropoutHiddenLayer, HiddenLayer2d, HiddenLayer, ConvolutionalHiddenSoftmax, ConvolutionalLayer import warnings warnings.filterwarnings('ignore') L = 330 n_epochs = 20 Q = 14 NUM_TRAIN = 1200000 #(use multiplicity of 50'000) MINIREAD = 1 batch_size= 1000 ############ ############ uncomment these lines below to verify if the code runs correctly, execution around 15 times faster ############ # n_epochs = 4 # Q = 4 # NUM_TRAIN = 250000 # MINIREAD = 4 # batch_size= 1000 THREAD = 20 span = 1 NN = 1000 POOL = 10 Knormal = 1794 * 100 / MINIREAD learning_rate0 = 0.2; def ReLU(x): y = T.maximum(0.0, x) return (y) def read(s, sn ,sp, Kile): P=[]; lenn = []; nott = 0 _nps = [] _s = [] with open(path+sp) as ff: for line in ff: x,y = line.split(',') P.append([float(x),float(y)]) print "opening" with open(path+s) as f: rlast = []; cnt = 0; arrayprev = []; Ti = []; ile = 0 for line in f: if cnt % 17940 == 0: print str(cnt/1794), "% ", if cnt != 0: pos = 0; r = []; rr2 = np.zeros(NN); rr = np.zeros(NN); rp = [] for x in line.split(','): x_f = float(x) rp.append(x_f) for x in rp: val2 = x - arrayprev[pos] rr[pos] = val2 # to sum pos+=1 nps = np.sum(rr) _w = [_x for _x in rr if _x >= 0.2] _wn = len(_w) if nps < THREAD : if nott > 0: lenn.append(nott); ile+=nott nott = 0 else: nott -=1 else: if nott <= 0: nott = 1 else: nott += 1 pos+=1 if nott >= 1: Ti.append(rr) _nps.append(nps) if nott==1: _s.append(1) else: _s.append(1) arrayprev = rp else: arrayprev = [float(x) for x in line.split(',')] if cnt > Kile + 10: 
break cnt+=1 C = [[0]*len(rr)]*len(rr) C = np.asarray(C) print "\n\n selected frames number = ", ile, "\n\n" if sn != None: with open(path+sn) as ff: for line in ff: a,b,w = line.split(',') a = int(a); b = int(b); w = int(w) if w==1: C[a-1][b-1] = 1; print "trans..." Tprim = np.empty((len(rr)+2, ile), np.float32) ############## for j in range(len(rr)): a = [] for i in range(ile): Tprim[j][i] = Ti[i][j] for i in range(ile): Tprim[1000][i] = _nps[i] for i in range(ile): Tprim[1001][i] = _s[i] gc.collect() print "AVG SPLIT LEN: ", np.mean(lenn) return Tprim, C, P def learnAndPredict(Ti, C, TOList): rng = np.random.RandomState(SEED) learning_rate = learning_rate0 print np.mean(Ti[1000,:]) aminW = np.amin(Ti[:1000,:]) amaxW = np.amax(Ti[:1000,:]) Ti[:1000,:] = (Ti[:1000,:] - aminW) / (amaxW - aminW) astdW = np.std(Ti[:1000,:]) ameanW = np.mean(Ti[:1000,:]) Ti[:1000,:] = (Ti[:1000,:] - ameanW) / astdW aminacW = np.amin(Ti[1000,:]) amaxacW = np.amax(Ti[1000,:]) print aminW, amaxW, aminacW, amaxacW Ti[1000,:] = (Ti[1000,:] - aminacW) / (amaxacW - aminacW) astdacW = np.std(Ti[1000,:]) ameanacW = np.mean(Ti[1000,:]) Ti[1000,:] = (Ti[1000,:] - ameanacW) / astdacW ile__ = len(TOList) ileList = np.zeros(ile__) for titer in range(len(TOList)): print np.mean(TOList[titer][1000,:]) TOList[titer][:1000,:] = (TOList[titer][:1000,:] - aminW)/(amaxW - aminW) TOList[titer][:1000,:] = (TOList[titer][:1000,:] - ameanW)/astdW TOList[titer][1000,:] = (TOList[titer][1000,:] - aminacW)/(amaxacW - aminacW) TOList[titer][1000,:] = (TOList[titer][1000,:] - ameanacW)/astdacW _, ileList[titer] = TOList[titer].shape _, ile = Ti.shape N = NN data = []; yyy = []; need = 1; BYL = {}; j= 0; dwa = 0; ONES = []; ZEROS = [] for i in range(NN): for j in range(NN): if i!= j: if C[i][j]==1: ONES.append((i,j)) else: ZEROS.append((i,j)) Nones = len(ONES) rng.shuffle(ONES) Nzeros = len(ZEROS) print Nones print Nzeros Needed = NUM_TRAIN/2 onesPerPair = Needed / Nones + 1 onesIter = 0 jj = 0 while jj < 
NUM_TRAIN: if jj%300000 == 0: print jj/300000, need = 1 - need if need == 1: pairNo = onesIter % Nones ppp = onesIter / Nones s,t = ONES[pairNo] shift = rng.randint(0, ile - L) onesIter += 1 if need == 0: zer = rng.randint(Nzeros) s,t = ZEROS[zer] del ZEROS[zer] Nzeros -= 1 shift = rng.randint(0, ile - L) x = np.hstack(( Ti[s][shift:shift+L], Ti[t][shift:shift+L], Ti[1000][shift:shift+L])) y = C[s][t] data.append(x); yyy.append(y) jj+=1 data = np.array(data, dtype=theano.config.floatX) is_train = np.array( ([0]*96 + [1,1,2,2]) * (NUM_TRAIN / 100)) yyy = np.array(yyy) train_set_x0, train_set_y0 = np.array(data[is_train==0]), yyy[is_train==0] test_set_x, test_set_y = np.array(data[is_train==1]), yyy[is_train==1] valid_set_x, valid_set_y = np.array(data[is_train==2]), yyy[is_train==2] n_train_batches = len(train_set_y0) / batch_size n_valid_batches = len(valid_set_y) / batch_size n_test_batches = len(test_set_y) / batch_size epoch = T.scalar() index = T.lscalar() x = T.matrix('x') inone2 = T.matrix('inone2') y = T.ivector('y') print '... 
building the model' #-------- my layers ------------------- #--------------------- layer0_input = x.reshape((batch_size, 1, 3, L)) Cx = 5 layer0 = ConvolutionalLayer(rng, input=layer0_input, image_shape=(batch_size, 1, 3, L), filter_shape=(nkerns[0], 1, 2, Cx), poolsize=(1, 1), fac = 0) ONE = (3 - 2 + 1) / 1 L2 = (L - Cx + 1) / 1 #--------------------- Cx2 = 5 layer1 = ConvolutionalLayer(rng, input=layer0.output, image_shape=(batch_size, nkerns[0], ONE, L2), filter_shape=(nkerns[1], nkerns[0], 2, Cx2), poolsize=(1, 1), activation=ReLU, fac = 0) ONE = (ONE - 2 + 1) /1 L3 = (L2 - Cx2 + 1) /1 #--------------------- Cx3 = 1 layer1b = ConvolutionalLayer(rng, input=layer1.output, image_shape=(batch_size, nkerns[1], ONE, L3), filter_shape=(nkerns[2], nkerns[1], 1, Cx3), poolsize=(1, POOL), activation=ReLU, fac = 0) ONE = (ONE - 1 + 1) /1 L4 = (L3 - Cx3 + 1) /POOL REGx = 100 #--------------------- layer2_input = layer1b.output.flatten(2) print layer2_input.shape use_b = False layer2 = HiddenLayer(rng, input=layer2_input, n_in=nkerns[2]*L4 , n_out=REGx, activation=T.tanh, use_bias = use_b) layer3 = LogisticRegression(input=layer2.output, n_in=REGx, n_out=2) cost = layer3.negative_log_likelihood(y) out_x2 = theano.shared(np.asarray(np.zeros((N,L)), dtype=theano.config.floatX)) inone2 = theano.shared(np.asarray(np.zeros((1,L)), dtype=theano.config.floatX)) inone3 = theano.shared(np.asarray(np.zeros((1,L)), dtype=theano.config.floatX)) inone4 = theano.shared(np.asarray(np.zeros((1,L)), dtype=theano.config.floatX)) test_set_x = theano.shared(np.asarray(test_set_x, dtype=theano.config.floatX)) train_set_x = theano.shared(np.asarray(train_set_x0, dtype=theano.config.floatX)) train_set_y = T.cast(theano.shared(np.asarray(train_set_y0, dtype=theano.config.floatX)), 'int32') test_set_y = T.cast(theano.shared(np.asarray(test_set_y, dtype=theano.config.floatX)), 'int32') valid_set_y = T.cast(theano.shared(np.asarray(valid_set_y, dtype=theano.config.floatX)), 'int32') valid_set_x = 
theano.shared(np.asarray(valid_set_x, dtype=theano.config.floatX)) test_model = theano.function([index], layer3.errors(y), givens={ x: test_set_x[index * batch_size: (index + 1) * batch_size], y: test_set_y[index * batch_size: (index + 1) * batch_size]}) validate_model = theano.function([index], layer3.errors(y), givens={ x: valid_set_x[index * batch_size: (index + 1) * batch_size], y: valid_set_y[index * batch_size: (index + 1) * batch_size]}) mom_start = 0.5; mom_end = 0.98; mom_epoch_interval = n_epochs * 1.0 #### @@@@@@@@@@@ class_params0 = [layer3, layer2, layer1, layer1b, layer0] class_params = [ param for layer in class_params0 for param in layer.params ] gparams = [] for param in class_params: gparam = T.grad(cost, param) gparams.append(gparam) gparams_mom = [] for param in class_params: gparam_mom = theano.shared(np.zeros(param.get_value(borrow=True).shape, dtype=theano.config.floatX)) gparams_mom.append(gparam_mom)<|fim▁hole|> mom = ifelse(epoch < mom_epoch_interval, mom_start*(1.0 - epoch/mom_epoch_interval) + mom_end*(epoch/mom_epoch_interval), mom_end) updates = OrderedDict() for gparam_mom, gparam in zip(gparams_mom, gparams): updates[gparam_mom] = mom * gparam_mom - (1. 
- mom) * learning_rate * gparam for param, gparam_mom in zip(class_params, gparams_mom): stepped_param = param + updates[gparam_mom] squared_filter_length_limit = 15.0 if param.get_value(borrow=True).ndim == 2: col_norms = T.sqrt(T.sum(T.sqr(stepped_param), axis=0)) desired_norms = T.clip(col_norms, 0, T.sqrt(squared_filter_length_limit)) scale = desired_norms / (1e-7 + col_norms) updates[param] = stepped_param * scale else: updates[param] = stepped_param output = cost train_model = theano.function(inputs=[epoch, index], outputs=output, updates=updates, givens={ x: train_set_x[index * batch_size:(index + 1) * batch_size], y: train_set_y[index * batch_size:(index + 1) * batch_size]}) keep = theano.function([index], layer3.errorsFull(y), givens={ x: train_set_x[index * batch_size:(index + 1) * batch_size], y: train_set_y[index * batch_size:(index + 1) * batch_size]}, on_unused_input='warn') timer = time.clock() print "finished reading", (timer - start_time0) /60. , "minutes " # TRAIN MODEL # print '... 
training' validation_frequency = n_train_batches; best_params = None; best_validation_loss = np.inf best_iter = 0; test_score = 0.; epochc = 0; while (epochc < n_epochs): epochc = epochc + 1 learning_rate = learning_rate0 * (1.2 - ((1.0 * epochc)/n_epochs)) for minibatch_index in xrange(n_train_batches): iter = (epochc - 1) * n_train_batches + minibatch_index cost_ij = train_model(epochc, minibatch_index) if (iter + 1) % validation_frequency == 0: validation_losses = [validate_model(i) for i in xrange(n_valid_batches)] this_validation_loss = np.mean(validation_losses) print(' %i) err %.2f ' % (epochc, this_validation_loss/10)), L, nkerns, REGx, "|", Cx, Cx2, Cx3, batch_size if this_validation_loss < best_validation_loss or epochc % 30 == 0: best_validation_loss = this_validation_loss best_iter = iter test_losses = [test_model(i) for i in xrange(n_test_batches)] test_score = np.mean(test_losses) print((' epoch %i, minibatch %i/%i, test error of best ' 'model %f %%') % (epochc, minibatch_index + 1, n_train_batches, test_score/10)) ############ timel = time.clock() print "finished learning", (timel - timer) /60. 
, "minutes " ppm = theano.function([index], layer3.pred_proba_mine(), givens={ x: T.horizontal_stack(T.tile(inone2, (batch_size ,1)), out_x2[index * batch_size: (index + 1) * batch_size], T.tile(inone3, (batch_size ,1))), y: train_set_y[0 * (batch_size): (0 + 1) * (batch_size)] }, on_unused_input='warn') NONZERO = (N*N-N) gc.collect() RESList = [np.zeros((N,N)) for it in range(ile__)] for __net in range(ile__): TO = TOList[__net] ileO = ileList[__net] RES = RESList[__net] shift = 0.1 DELTAshift = (ileO-L) / (Q-1) print "DELTAshift:", DELTAshift for q in range (Q): dataO = []; print (q+1),"/", Q , " ", out_x2.set_value(np.asarray(np.array(TO[:,shift:shift+L]), dtype=theano.config.floatX)) PARTIAL = np.zeros((N,N)) inone3.set_value(np.asarray(np.array(TO[1000][shift:shift+L]).reshape(1,L), dtype=theano.config.floatX)) for i in range(N): inone2.set_value(np.asarray(np.array(TO[i][shift:shift+L]).reshape(1,L), dtype=theano.config.floatX)) p = [ppm(ii) for ii in xrange( N / batch_size)] for pos in range(N): if pos != i: PARTIAL[i][pos] += p[pos / batch_size][pos % batch_size][1] for i in range(N): for j in range(N): RES[i][j] += PARTIAL[i][j] shift += DELTAshift print "Finished", __net RESList[__net] = RES/np.max(RES) gc.collect() end_time = time.clock() print "finished predicting", (end_time - timel) /60. 
, "minutes ", str(nkerns), "using SEED = ", SEED print('The code for file ' + os.path.split(__file__)[1] + ' ran for %.2fm' % ((end_time - start_time0) / 60.)) return RESList if __name__ == '__main__': MY = 9 #(GPU) VER = 1 if len(sys.argv)>1: # select random sequence seed VER = int(sys.argv[1]) nkerns = [18, 40, 15] if VER == 1: SEED = 8001 if VER == 2: SEED = 80001 if VER == 3: SEED = 888 if VER == 4: SEED = 8881 if VER == 5: SEED = 8001 if VER == 6: SEED = 100 if VER == 7: SEED = 18000 if VER == 8: SEED = 80801 start_time0 = time.clock() print THREAD if MY == 1 or MY == 5 or MY == 2 or MY==9: path = "/firstly/set/path/here" name = "normal-1" s = "/"+name+"/fluorescence_"+name+".txt" sn = "/"+name+"/network_"+name+".txt" sp = "/"+name+"/networkPositions_"+name+".txt" print name TN1, CN1 , PN1 = read(s,sn,sp, Knormal) gc.collect() name0 = name name = "normal-3" s = "/"+name+"/fluorescence_"+name+".txt" sn = "/"+name+"/network_"+name+".txt" sp = "/"+name+"/networkPositions_"+name+".txt" print name TN2, CN2, PN2 = read(s,sn,sp, Knormal) gc.collect() name = "normal-2" s = "/"+name+"/fluorescence_"+name+".txt" sn = "/"+name+"/network_"+name+".txt" sp = "/"+name+"/networkPositions_"+name+".txt" print name TN3, CN3 , PN3 = read(s,sn,sp, Knormal) gc.collect() if MY == 9: print "reading valid..." s = "/valid/fluorescence_valid.txt" sn = None sp = "/valid/networkPositions_valid.txt" TV, _, PV = read(s,sn,sp, Knormal) print "reading test..." 
s = "/test/fluorescence_test.txt" sn = None sp = "/test/networkPositions_test.txt" TT, _, PT = read(s,sn,sp, Knormal) [RN2, RT, RV, RN3] = learnAndPredict(TN1, CN1, [TN2, TT, TV, TN3]) suff = np.random.randint(10000) f = open("./res_ver"+str(VER)+".csv", 'w') f.write("NET_neuronI_neuronJ,Strength\n") for i in range (1000): for j in range (1000): f.write("valid_" +str(i+1)+"_"+str(j+1)+","+str(RV[i][j])+"\n") for i in range (1000): for j in range (1000): f.write("test_" +str(i+1)+"_"+str(j+1)+","+str(RT[i][j])+"\n") f.close() print "Wrote solution of VER ==", str(VER) RN2_ = RN2.flatten().tolist() a = auc.auc(CN2.flatten().tolist(),RN2_) RN3_ = RN3.flatten().tolist() a2 = auc.auc(CN3.flatten().tolist(),RN3_) print ("RES: %.2f learning (%.2f, %.2f)" % ((a+a2)*50, a*100, a2*100 ))<|fim▁end|>
<|file_name|>testwebfunctionaldb.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # -*- coding: UTF8 -*- # Este arquivo é parte do programa Carinhas # Copyright 2013-2014 Carlo Oliveira <[email protected]>, # `Labase <http://labase.selfip.org/>`__; `GPL <http://is.gd/3Udt>`__. # # Carinhas é um software livre; você pode redistribuí-lo e/ou # modificá-lo dentro dos termos da Licença Pública Geral GNU como # publicada pela Fundação do Software Livre (FSF); na versão 2 da # Licença. # # Este programa é distribuído na esperança de que possa ser útil, # mas SEM NENHUMA GARANTIA; sem uma garantia implícita de ADEQUAÇÃO # a qualquer MERCADO ou APLICAÇÃO EM PARTICULAR. Veja a # Licença Pública Geral GNU para maiores detalhes. # # Você deve ter recebido uma cópia da Licença Pública Geral GNU # junto com este programa, se não, escreva para a Fundação do Software # Livre(FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ ############################################################ SuperPython - Teste de Funcionalidade Web ############################################################ Verifica a funcionalidade do servidor web. 
""" __author__ = 'carlo' import unittest import sys import bottle import os import sys import os project_server = os.path.dirname(os.path.abspath(__file__)) project_server = os.path.join(project_server, '../src/') # print(project_server) sys.path.insert(0, project_server) # make sure the default templates directory is known to Bottle templates_dir = os.path.join(project_server, 'server/views/') # print(templates_dir) if templates_dir not in bottle.TEMPLATE_PATH: bottle.TEMPLATE_PATH.insert(0, templates_dir) if sys.version_info[0] == 2: from mock import MagicMock, patch else: from unittest.mock import MagicMock, patch, ANY from webtest import TestApp from server.control import application as appbottle import server.modelo_redis as cs import server.control as ct class FunctionalWebTest(unittest.TestCase): def setUp(self): cs.DBF = '/tmp/redis_test.db' pass def test_default_page(self): """ test_default_page """ app = TestApp(appbottle) response = app.get('/static/index.html') self.assertEqual('200 OK', response.status) self.assertTrue('<title>Jogo Eica - Cadastro</title>' in response.text, response.text[:1000]) def test_default_redirect(self): """test_default_redirect """ app = TestApp(appbottle) response = app.get('/')<|fim▁hole|> def test_register(self): """test_register """ # app = TestApp(appbottle) # response = app.get('/static/register?doc_id="10000001"&module=projeto2222') rec_id, response = self._get_id('3333') self.assertEqual('200 OK', response.status) self.assertTrue(rec_id in response, str(response)) # rec_id = str(response).split('ver = main("')[1].split('e0cb4e39e071")')[0] + 'e0cb4e39e071' expected_record = "{'module': 'projeto2222', 'user': 'projeto2222-lastcodename', 'idade': '00015'," received_record = cs.DRECORD.get(rec_id) assert expected_record in str(received_record),\ "{}: {}".format(rec_id, received_record) def _get_id(self, ref_id='e0cb4e39e071', url='/static/register?doc_id="10000001"&module=projeto2222'): """test_store """ app = 
TestApp(appbottle) user, idade, ano, sexo = 'projeto2222-lastcodename', '00015', '0009', 'outro' user_data = dict(doc_id=ref_id, user=user, idade=idade, ano=ano, sexo=sexo) response = app.get(url, params=user_data) return str(response).split('ver = main("')[1].split('")')[0], response def test_store(self): """test_store """ app = TestApp(appbottle) # response = app.get('/static/register?doc_id="10000001"&module=projeto2222') # rec_id = str(response).split('ver = main("')[1].split('e0cb4e39e071")')[0] + 'e0cb4e39e071' rec_id, _ = self._get_id() response = app.post('/record/store', self._pontua(rec_id)) self.assertEqual('200 OK', response.status) self.assertTrue('", "tempo": "20' in response, str(response)) # self.assertTrue('{"module": "projeto2222", "jogada": [{"carta": "2222",' in str(response), str(response)) expected_record = "{'module': 'projeto2222', 'user': 'projeto2222-lastcodename', 'idade': '00015'," received_record = str(response) assert expected_record.replace("'", '"') in received_record,\ "{}: {}".format(rec_id, received_record) def _pontua(self, ref_id): ct.LAST = ref_id jogada = {"doc_id": ref_id, "carta": 2222, "casa": 2222, "move": 2222, "ponto": 2222, "tempo": 2222, "valor": 2222} return jogada def test_pontos(self): rec_id, response = self._get_id() app = TestApp(appbottle) app.post('/record/store', self._pontua(rec_id)) ct.LAST = rec_id response = app.get('/pontos') self.assertEqual('200 OK', response.status) self.assertTrue('projeto2222-lastcodename' in response, str(response)) self.assertTrue('<h3>Idade: 10 Genero: outro Ano Escolar: 9</h3>' in response, str(response)) self.assertTrue('<td><span>2222<span></td>' in response, str(response)) if __name__ == '__main__': unittest.main()<|fim▁end|>
self.assertEqual('302 Found', response.status)
<|file_name|>vsotogglbutton.d.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../scripts/ref/jquery.d.ts" /> /// <reference path="../../scripts/ref/VSS.d.ts" /> interface ITogglFormResponse { activityDescription: string; project: string; tags: string; } interface ITogglOpts { method: string; baseUrl: string; token: string; crendentials: any; onLoad: any; } declare class TogglButton { $ApiV8Url: string; $user: any; constructor(); fetchUser(token: string): void; ajax(url: string, opts: any): void; } declare class TogglButtonForm { formChangedCallbacks: any[]; workItem: any; constructor(workItem: any); initializeForm(): void; fetchTogglInformations(): void; getFormInputs(): { activityDescription: any; project: any; tags: any; }; onFormChanged(callback: any): void; } declare class TogglButtonDialogLauncher { actionContext: any; constructor(actionContext: any); launchDialog(): void;<|fim▁hole|><|fim▁end|>
} declare var togglButtonHandler: { execute: (actionContext: any) => void; };
<|file_name|>fully-qualified-type-name3.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that we use fully-qualified type names in error messages. // ignore-test<|fim▁hole|>type T1 = usize; type T2 = isize; fn bar(x: T1) -> T2 { return x; //~^ ERROR mismatched types: expected `T2`, found `T1` } fn main() { }<|fim▁end|>
<|file_name|>strings.go<|end_file_name|><|fim▁begin|>package parser import ( "monkey/ast" "monkey/token" ) func (p *Parser) parseStringLiteralExpression() ast.Expression { return &ast.StringLiteral{Token: p.curToken, Value: p.curToken.Literal} } func (p *Parser) parseInterpolatedString() ast.Expression { is := &ast.InterpolatedString{Token: p.curToken, Value: p.curToken.Literal, ExprMap: make(map[byte]ast.Expression)} <|fim▁hole|> if p.curTokenIs(token.LBRACE) { p.nextToken() expr := p.parseExpression(LOWEST) is.ExprMap[key] = expr key++ } p.nextInterpToken() if p.curTokenIs(token.ISTRING) { break } } return is }<|fim▁end|>
key := "0"[0] for {
<|file_name|>members.js<|end_file_name|><|fim▁begin|>'use strict'; /** * @ngdoc function * @name lubriApp.controller:MembersCtrl * @description * # MembersCtrl * Controller of the lubriApp */ angular.module('lubriApp') .config(function($stateProvider) { $stateProvider.state('app.members', { abstract: true, url: '/members', templateUrl: 'views/members/main.html', controller: 'MembersCtrl' }) .state('app.members.list', { url: '', templateUrl: 'views/members/list.html', controller: 'MembersCtrl' }) .state('app.members.add', { url: '/add', templateUrl: 'views/members/form.html', controller: 'MembersCtrl' }) .state('app.members.import', { url: '/import', templateUrl: 'views/members/import.html', controller: 'MembersCtrl' }) .state('app.members.edit', { url: '/:id/edit', templateUrl: 'views/members/form.html', controller: 'MembersCtrl' }) .state('app.members.view', { url: '/:id', templateUrl: 'views/members/view.html', controller: 'MembersCtrl' }); }) .controller('MembersCtrl', function($scope, $state, $stateParams, $q, $interval, toasty, Member, SweetAlert, i18nService) { var memberId = $stateParams.id; i18nService.setCurrentLang('zh-cn'); $scope.importData = []; $scope.gridImportOptions = { enableGridMenu: true, importerDataAddCallback: function( grid, newObjects ) { $scope.importData = $scope.importData.concat( newObjects ); }, onRegisterApi: function(gridApi){ $scope.gridImportApi = gridApi; gridApi.rowEdit.on.saveRow($scope, $scope.saveRow); }, data: 'importData' }; $scope.saveRow = function( rowEntity ) { // create a fake promise - normally you'd use the promise returned by $http or $resource var promise = $q.defer(); $scope.gridImportApi.rowEdit.setSavePromise( $scope.gridImportApi.grid, rowEntity, promise.promise ); $interval( function() { promise.resolve(); }, 1000, 1); }; $scope.saveImport = function () { if ($scope.importData.length > 0) { var members = $scope.importData; for (var i=0;i<members.length;i++) { var member = members[i]; member.created = new 
Date(); delete member.$$hashKey; Member.upsert(member, function() { }, function(err) { console.log(err); }); }; toasty.pop.success({title: '组员导入成功', msg: members.length + '个组员成功导入到系统中!', sound: false}); loadItems(); $state.go('^.list'); }; }; if (memberId) { $scope.member = Member.findById({ id: memberId }, function() {}, function(err) { console.log(err); }); } else { $scope.member = {}; } $scope.gridOptions = { data: 'members', enableFiltering: true, paginationPageSizes: [5, 10, 15], paginationPageSize: 10, headerRowHeight: 39, rowHeight: 39, columnFooterHeight: 39, gridFooterHeight: 39, selectionRowHeaderWidth: 39, columnDefs: [ { name: 'Edit', width: 80, displayName: '编辑', enableSorting: false, enableFiltering: false, cellTemplate: '<a href="" class="ui-state-hover" ui-sref="^.edit({id: row.entity.id})"> <i class="fa fa-pencil fa-lg blue"></i></a> <a href="" class="ui-state-hover" style="margin-left:5px;" ng-click="getExternalScopes().delete({id: row.entity.id})"><i class="fa fa-trash-o fa-lg red"></i></a>' }, { name: 'name', displayName: '全称', cellTemplate: '<div class="ui-grid-cell-contents"><a href="" ui-sref="^.view({id: row.entity.id})"> {{ COL_FIELD }} </a></div>'} ,{ name: 'firstName', displayName: '姓' } ,{ name: 'lastName', displayName: '名' } ,{ name: 'displayName', displayName: '显示名' } ,{ name: 'position', displayName: '职位' } , { name: 'priority', displayName: '排序号' } ], enableGridMenu: true, enableSelectAll: true, exporterCsvFilename: 'members.csv', exporterSuppressColumns: ['Edit'], exporterPdfDefaultStyle: {fontSize: 9}, exporterPdfTableStyle: {margin: [30, 30, 30, 30]}, exporterPdfTableHeaderStyle: {fontSize: 10, bold: true, italics: true, color: 'red'}, exporterPdfHeader: { text: "Meeting Member Information", style: 'headerStyle' }, exporterPdfFooter: function ( currentPage, pageCount ) { return { text: currentPage.toString() + ' of ' + pageCount.toString(), style: 'footerStyle' }; }, exporterPdfCustomFormatter: function ( docDefinition ) { 
docDefinition.styles.headerStyle = { fontSize: 22, bold: true }; docDefinition.styles.footerStyle = { fontSize: 10, bold: true }; return docDefinition;<|fim▁hole|> exporterPdfPageSize: 'LETTER', exporterPdfMaxGridWidth: 500, exporterCsvLinkElement: angular.element(document.querySelectorAll(".custom-csv-link-location")) }; $scope.gridOptions.onRegisterApi = function (gridApi) { $scope.gridApi = gridApi; }; function loadItems() { $scope.members = Member.find(); } loadItems(); $scope.viewActions = { delete : $scope.delete }; $scope.delete = function(id) { SweetAlert.swal({ title: '您确定要删除吗?', type: 'warning', showCancelButton: true, confirmButtonColor: '#DD6B55' }, function(isConfirm){ if (isConfirm) { Member.deleteById(id, function() { toasty.pop.success({title: '组员被删除', msg: '您成功删除了组员!', sound: false}); loadItems(); $state.go($state.current, {}, {reload: true}); //$state.go('app.members.list'); }, function(err) { toasty.pop.error({title: '删除组员出错', msg: '删除组员发生错误:' + err, sound: false}); }); } else { return false; } }); }; $scope.formFields = [{ key: 'name', type: 'text', label: '全名', required: true }, { key: 'firstName', type: 'text', label: '姓', required: true }, { key: 'lastName', type: 'text', label: '名', required: true }, { key: 'displayName', type: 'text', label: '显示名', required: true }, { key: 'position', type: 'text', label: '职位', required: true }, { key: 'priority', type: 'number', label: '排序号', required: true }]; $scope.formOptions = { uniqueFormId: true, hideSubmit: false, submitCopy: '保存' }; $scope.onSubmit = function() { if (($scope.member.created === null) || ($scope.member.created === undefined)){ $scope.member.created = new Date(); }; Member.upsert($scope.member, function() { toasty.pop.success({title: '组员保存成功', msg: '组员已成功保存到系统中!', sound: false}); loadItems(); $state.go('^.list'); }, function(err) { console.log(err); }); }; });<|fim▁end|>
}, exporterPdfOrientation: 'portrait',
<|file_name|>issue-76077-1.rs<|end_file_name|><|fim▁begin|>// run-rustfix #![allow(dead_code, unused_variables)] pub mod foo { #[derive(Default)] pub struct Foo { invisible: bool, } #[derive(Default)] pub struct Bar { pub visible: bool, invisible: bool, } } fn main() { let foo::Foo {} = foo::Foo::default();<|fim▁hole|> //~^ ERROR pattern requires `..` due to inaccessible fields let foo::Bar { visible } = foo::Bar::default(); //~^ ERROR pattern requires `..` due to inaccessible fields }<|fim▁end|>
<|file_name|>gpu.rs<|end_file_name|><|fim▁begin|>use maplit::hashmap; use crate::GpuState; use std::{collections::HashMap, mem}; #[repr(C)] #[derive(Copy, Clone)] pub(crate) struct GenHeightmapsUniforms { pub position: [i32; 2], pub origin: [i32; 2], pub spacing: f32, pub in_slot: i32, pub out_slot: i32, pub level_resolution: i32, pub face: u32, } unsafe impl bytemuck::Zeroable for GenHeightmapsUniforms {} unsafe impl bytemuck::Pod for GenHeightmapsUniforms {} #[repr(C)] #[derive(Copy, Clone)] pub(crate) struct GenDisplacementsUniforms { pub node_center: [f64; 3], pub padding0: f64, pub origin: [i32; 2], pub position: [i32; 2], pub stride: i32, pub heightmaps_slot: i32, pub displacements_slot: i32, pub face: i32, pub level_resolution: u32, } unsafe impl bytemuck::Zeroable for GenDisplacementsUniforms {} unsafe impl bytemuck::Pod for GenDisplacementsUniforms {} #[repr(C)] #[derive(Copy, Clone)] pub(crate) struct GenNormalsUniforms { pub heightmaps_origin: [i32; 2], pub heightmaps_slot: i32, pub normals_slot: i32, pub spacing: f32, pub padding: [f32; 3], } unsafe impl bytemuck::Zeroable for GenNormalsUniforms {} unsafe impl bytemuck::Pod for GenNormalsUniforms {} #[repr(C)] #[derive(Copy, Clone)] pub(crate) struct GenMaterialsUniforms { pub heightmaps_origin: [i32; 2], pub parent_origin: [u32; 2], pub heightmaps_slot: i32, pub normals_slot: i32, pub albedo_slot: i32, pub parent_slot: i32, pub spacing: f32, pub padding: i32, } unsafe impl bytemuck::Zeroable for GenMaterialsUniforms {} unsafe impl bytemuck::Pod for GenMaterialsUniforms {} pub(crate) struct ComputeShader<U> { shader: rshader::ShaderSet, bindgroup_pipeline: Option<(wgpu::BindGroup, wgpu::ComputePipeline)>, uniforms: Option<wgpu::Buffer>, name: String, _phantom: std::marker::PhantomData<U>, } #[allow(unused)] impl<U: bytemuck::Pod> ComputeShader<U> { pub fn new(shader: rshader::ShaderSource, name: String) -> Self { Self { shader: rshader::ShaderSet::compute_only(shader).unwrap(), bindgroup_pipeline: None, 
uniforms: None, name, _phantom: std::marker::PhantomData, } } pub fn refresh(&mut self) -> bool { if self.shader.refresh() { self.bindgroup_pipeline = None; true } else {<|fim▁hole|> false } } pub fn run( &mut self, device: &wgpu::Device, encoder: &mut wgpu::CommandEncoder, state: &GpuState, dimensions: (u32, u32, u32), uniforms: &U, ) { if self.uniforms.is_none() { self.uniforms = Some(device.create_buffer(&wgpu::BufferDescriptor { size: mem::size_of::<U>() as u64, usage: wgpu::BufferUsage::COPY_DST | wgpu::BufferUsage::UNIFORM, mapped_at_creation: false, label: Some(&format!("buffer.{}.uniforms", self.name)), })); } if self.bindgroup_pipeline.is_none() { let (bind_group, bind_group_layout) = state.bind_group_for_shader( device, &self.shader, hashmap!["ubo".into() => (false, wgpu::BindingResource::Buffer(wgpu::BufferBinding { buffer: self.uniforms.as_ref().unwrap(), offset: 0, size: None, }))], HashMap::new(), &format!("bindgroup.{}", self.name), ); self.bindgroup_pipeline = Some(( bind_group, device.create_compute_pipeline(&wgpu::ComputePipelineDescriptor { layout: Some(&device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor { bind_group_layouts: [&bind_group_layout][..].into(), push_constant_ranges: &[], label: Some(&format!("pipeline.{}.layout", self.name)), })), module: &device.create_shader_module(&wgpu::ShaderModuleDescriptor { label: Some(&format!("shader.{}", self.name)), source: wgpu::ShaderSource::SpirV(self.shader.compute().into()), flags: wgpu::ShaderFlags::empty(), }), entry_point: "main", label: Some(&format!("pipeline.{}", self.name)), }), )); } let staging = device.create_buffer(&wgpu::BufferDescriptor { size: mem::size_of::<U>() as u64, usage: wgpu::BufferUsage::COPY_SRC, label: Some(&format!("buffer.temporary.{}.upload", self.name)), mapped_at_creation: true, }); let mut buffer_view = staging.slice(..).get_mapped_range_mut(); buffer_view[..mem::size_of::<U>()].copy_from_slice(bytemuck::bytes_of(uniforms)); drop(buffer_view); 
staging.unmap(); encoder.copy_buffer_to_buffer( &staging, 0, self.uniforms.as_ref().unwrap(), 0, mem::size_of::<U>() as u64, ); let mut cpass = encoder.begin_compute_pass(&wgpu::ComputePassDescriptor { label: None }); cpass.set_pipeline(&self.bindgroup_pipeline.as_ref().unwrap().1); cpass.set_bind_group(0, &self.bindgroup_pipeline.as_ref().unwrap().0, &[]); cpass.dispatch(dimensions.0, dimensions.1, dimensions.2); } }<|fim▁end|>
<|file_name|>rpc.py<|end_file_name|><|fim▁begin|># adapted from zmq_server_example.py in tinyrpc import time, sys import zmq from tinyrpc.protocols.jsonrpc import JSONRPCProtocol from tinyrpc.transports.zmq import ZmqServerTransport from tinyrpc.server import RPCServer from tinyrpc.dispatch import RPCDispatcher class Server(object): def __init__(self, req_callback): # print 'initializing Rpc' self.ctx = zmq.Context() self.dispatcher = RPCDispatcher()<|fim▁hole|> self.req_callback = req_callback self.rpc_server = RPCServer( self.transport, JSONRPCProtocol(), self.dispatcher ) self.dispatcher.public(self.request) # register this function (replacing the decorator) # print 'READYc: '+str(time.clock()) # sys.exit(0) self.rpc_server.serve_forever() # def start(self): # self.rpc_server.serve_forever() def request(self, req): return self.req_callback(req)<|fim▁end|>
self.transport = ZmqServerTransport.create(self.ctx, 'tcp://127.0.0.1:8000')
<|file_name|>channel_partner_links.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import proto # type: ignore from google.cloud.channel_v1.types import common from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( package="google.cloud.channel.v1", manifest={ "ChannelPartnerLinkView", "ChannelPartnerLinkState", "ChannelPartnerLink", }, ) class ChannelPartnerLinkView(proto.Enum): r"""The level of granularity the<|fim▁hole|> [ChannelPartnerLink][google.cloud.channel.v1.ChannelPartnerLink] will display. """ UNSPECIFIED = 0 BASIC = 1 FULL = 2 class ChannelPartnerLinkState(proto.Enum): r"""ChannelPartnerLinkState represents state of a channel partner link. """ CHANNEL_PARTNER_LINK_STATE_UNSPECIFIED = 0 INVITED = 1 ACTIVE = 2 REVOKED = 3 SUSPENDED = 4 class ChannelPartnerLink(proto.Message): r"""Entity representing a link between distributors and their indirect resellers in an n-tier resale channel. Attributes: name (str): Output only. Resource name for the channel partner link, in the format accounts/{account_id}/channelPartnerLinks/{id}. reseller_cloud_identity_id (str): Required. Cloud Identity ID of the linked reseller. link_state (google.cloud.channel_v1.types.ChannelPartnerLinkState): Required. State of the channel partner link. invite_link_uri (str): Output only. URI of the web page where partner accepts the link invitation. 
create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp of when the channel partner link is created. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp of when the channel partner link is updated. public_id (str): Output only. Public identifier that a customer must use to generate a transfer token to move to this distributor-reseller combination. channel_partner_cloud_identity_info (google.cloud.channel_v1.types.CloudIdentityInfo): Output only. Cloud Identity info of the channel partner (IR). """ name = proto.Field(proto.STRING, number=1,) reseller_cloud_identity_id = proto.Field(proto.STRING, number=2,) link_state = proto.Field(proto.ENUM, number=3, enum="ChannelPartnerLinkState",) invite_link_uri = proto.Field(proto.STRING, number=4,) create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) public_id = proto.Field(proto.STRING, number=7,) channel_partner_cloud_identity_info = proto.Field( proto.MESSAGE, number=8, message=common.CloudIdentityInfo, ) __all__ = tuple(sorted(__protobuf__.manifest))<|fim▁end|>
<|file_name|>controller.go<|end_file_name|><|fim▁begin|>/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package controller import ( "github.com/op/go-logging" "github.com/spf13/viper" "github.com/openblockchain/obc-peer/openchain/consensus"<|fim▁hole|> "github.com/openblockchain/obc-peer/openchain/consensus/obcpbft" ) var logger *logging.Logger // package-level logger func init() { logger = logging.MustGetLogger("consensus/controller") } // NewConsenter constructs a Consenter object func NewConsenter(stack consensus.Stack) (consenter consensus.Consenter) { plugin := viper.GetString("peer.validator.consensus") if plugin == "obcpbft" { //logger.Info("Running with consensus plugin %s", plugin) consenter = obcpbft.GetPlugin(stack) } else { //logger.Info("Running with default consensus plugin (noops)") consenter = noops.GetNoops(stack) } return }<|fim▁end|>
"github.com/openblockchain/obc-peer/openchain/consensus/noops"
<|file_name|>funcs.js<|end_file_name|><|fim▁begin|>/** * XTemplate 所有的扩展函数集合,用于处理html中常见的格式转换,默认值等处理。 * 如果需要自行扩展,请使用window.Render的addFunc函数 * * @class Render.funcs */ (function (r, undefined) { 'use strict'; /** * 指定输出的默认值,如果有值就原样输出,如果空或是null,就输出默认值。 * * 示例: * * {name|default,'小明'} * * @method default * @param val {string} 变量名 * @param defaultVal 默认值 * @returns {object} */ r.addFunc('default', function (val, defaultVal) { if (val === undefined || typeof(val) == 'undefined' || val === '' || val === 'null') { return defaultVal; } return val; }); /** * 判断变量是否为空。 * * 示例: * * {name|empty,'空','不空'} * * @method default * @param val {string} 变量名 * @param emptyValue 为空时显示值 * @param notEmptyValue 不为空时显示的值 * @returns {object} */ r.addFunc('empty', function (val, emptyValue, notEmptyValue) { if (val === undefined || typeof(val) == 'undefined' || val === '' || val === 'null') { return emptyValue; } else { return notEmptyValue; } }); /** * 根据设定值返回指定内容 * * 示例: * * {status|case,-1,'审核不通过',1,'审核通过','待审核'} * {status|case,-1,'审核不通过',1,'审核通过',2,'VIP','待审核'} * * 参数说明:参数成对出现,第一个是设定值,第二是要返回的值;后续可以增加多个成队的参数;最后一个参数为默认值,所有设定值都不满足时输出 * @method case * @param val {string} 变量名 * @returns {object} */ r.addFunc('case', function (val) { for (var i = 1; i < arguments.length; i += 2) { if (val == arguments[i] && i < arguments.length - 1) { return arguments[i + 1]; } } return arguments[arguments.length - 1]; }); /** * 格式化货币,最少小数显示, * 示例: * * {price|format_money} * 如果price为10.0100,显示10.01 * 如果price为10.000,显示10 * * @method format_money * @param val {string} 变量名 * @returns {number} */ r.addFunc('format_money', function (val) { return parseFloat(val); }); /** * 将 Date 转化为指定格式的String * 月(M)、日(d)、小时(h)、分(m)、秒(s)、季度(q) 可以用 1-2 个占位符, * 年(y)可以用 1-4 个占位符,毫秒(S)只能用 1 个占位符(是 1-3 位的数字) * 示例: * * {date|format_date,"yyyy-MM-dd hh:mm:ss.S"} 输出 2006-07-02 08:09:04.423 * {date|format_date,"yyyy-M-d h:m:s.S"} 输出 2006-7-2 8:9:4.18 * {date|format_date,"yyyy-M-d h:m:s"} 输出 2006-7-2 8:9:4 * * @method format_date * @param 
val {string} 变量名 * @param fmt {string} 格式串 * @returns {string} 格式化后的日期串 */ r.addFunc('format_date', function (val, fmt) { if (typeof(val) != 'object') { val = new Date(parseInt(val)); } if (!fmt) { fmt = 'yyyy-MM-dd hh:mm:ss'; } var format_data_o = { "M+": val.getMonth() + 1, //月份 "d+": val.getDate(), //日 "h+": val.getHours(), //小时 "m+": val.getMinutes(), //分 "s+": val.getSeconds(), //秒 "q+": Math.floor((val.getMonth() + 3) / 3), //季度 "S": val.getMilliseconds() //毫秒 }; if (/(y+)/.test(fmt)) fmt = fmt.replace(RegExp.$1, (val.getFullYear() + "").substr(4 - RegExp.$1.length)); for (var k in format_data_o) if (new RegExp("(" + k + ")").test(fmt)) fmt = fmt.replace(RegExp.$1, (RegExp.$1.length == 1) ? (format_data_o[k]) : (("00" + format_data_o[k]).substr(("" + format_data_o[k]).length))); return fmt; }); /** * 数字保留小数位数 * 示例: * * {float_num|fixed,2} * * @method fixed * @param val {string} 要格式的变量名 * @param c {number} 保留的小数位置,默认为0 * @returns {number} */ r.addFunc('fixed', function (val, c) { if (typeof c == 'undefined') { c = 0; } if (typeof(val) == 'number') { return val.toFixed(c); } else { return val; } }); /** * 没有正确的函数处理时,用此函数处理,直接输出变量值 * 外部不要使用 * @param val {string} 变量名 * @returns {string} */ r.addFunc('noFunc', function (val) { return '没有找到正确的处理函数'; }); /** * 重复输出num次val * * 示例: * * {num|repeat,'*'},当num=4时,输出**** * * @method repeat * @param val {string} 重复次数 * @param res {string}要重复的内容 * @returns {string} */ r.addFunc('repeat', function (val, res) { var result = ''; for (var i = 0; i < val; i++) { result += res; } return result; }); /** * 内部实现简单的循环,注意,内部模板和普通模板有区别,需要使用小括号代替大扩号。 * 常用于嵌套循环显示。 * * 示例: * * {array|range,'(id),'},如果array=[{id:0},{id:1}],会输出0,1, * * @method range * @param list {string} 要循环的数组变量名 * @param tmpl {string} 模板 * @returns {string} 输出的html */ r.addFunc('range', function (list, tmpl) { var html = ''; if (tmpl) { tmpl = tmpl.replace(/\(/g, '{').replace(/\)/g, '}'); var func = r.syntax.buildFunc('range', tmpl); if (func) { for (var i = 0; i < 
list.length; i++) { html += func(r, list[i]); } } } return html; }); /** * 过滤html字符,因为系统默认已过滤html,所以此函数一般外部不使用 * * 示例: * * {code|filter_html} * * @method filter_html * @param html {string} 待过滤的html代码 * @returns {string} */ r.addFunc('filter_html', function (html) { return r.util.html(html); }); /** * 从左侧按指定长度截断字串,注意一个汉字按2个字符计算,这样可以准确的控制格式 * * 示例: * * {str|left,20,'...'} * {str|left,20} * * @method left * @param str {string} 要截断的字串变量名 * @param len {number} 截断后的字串长度,一个汉字按2个字符计算 * @param dot {string} [可选] 截断后补充的串,示例:"..." * @returns {string} */ r.addFunc('left', function (str, len, dot) { var newLength = 0; var newStr = ""; var chineseRegex = /[^\x00-\xff]/g; var singleChar = ""; var dotLen = 0; if (dot) { dotLen = dot.length; } var strLength = str.replace(chineseRegex, "**").length; for (var i = 0; i < strLength; i++) { singleChar = str.charAt(i).toString(); if (singleChar.match(chineseRegex) !== null) { newLength += 2; } else { newLength++; } if (newLength + dotLen > len) { if (dotLen > 0) { newStr += dot; } break; } newStr += singleChar;<|fim▁hole|> } return newStr; }); })(window.Render);<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from "react"; import { Message } from "semantic-ui-react"; import Bracket from "./Bracket"; import "./index.scss"; import parseStats from './parseStats'; export default class Brackets extends React.PureComponent { constructor(props) {<|fim▁hole|> this.state = { data: this.updateStats(props), }; } componentWillReceiveProps(nextProps) { if (nextProps.stats !== this.props.stats) { this.setState({ data: this.updateStats(nextProps), }); } } updateStats = (props) => { return parseStats(props.stats); }; render () { if (!this.props.stats) { return ( <Message>Waiting for Tournament Stats...</Message> ); } return ( <div> {this.state.data.map((bracket, $index) => ( <div className="tournament-bracket" key={ bracket.match.matchID }> <Bracket finished={ this.props.stats.finished } item={bracket} key={$index} totalGames={ this.props.stats.options.numberOfGames } /> </div> ))} </div> ); } }<|fim▁end|>
super(props);
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
from PETScMatOps import *
<|file_name|>ibos.select2.js<|end_file_name|><|fim▁begin|>/** * 基于jquery.select2扩展的select插件,基本使用请参考select2相关文档 * 默认是多选模式,并提供了input模式下的初始化方法,对应的数据格式是{ id: 1, text: "Hello" } * 这里的参数只对扩展的部分作介绍 * filter、includes、excludes、query四个参数是互斥的,理论只能有其一个参数 * @method ibosSelect * @param option.filter * @param {Function} option.filter 用于过滤源数据的函数 * @param {Array} option.includes 用于过滤源数据的数据,有效数据的id组 * @param {Array} option.excludes 用于过滤源数据的数据,无效数据的id组 * @param {Boolean} option.pinyin 启用拼音搜索,需要pinyinEngine组件 * @return {jQuery} */ $.fn.ibosSelect = (function(){ var _process = function(datum, collection, filter){ var group, attr; datum = datum[0]; if (datum.children) { group = {}; for (attr in datum) { if (datum.hasOwnProperty(attr)) group[attr] = datum[attr]; } group.children = []; $(datum.children).each2(function(i, childDatum) { _process(childDatum, group.children, filter); }); if (group.children.length) { collection.push(group); } } else { if(filter && !filter(datum)) { return false; } collection.push(datum); } } // 使用带有filter过滤源数据的query函数,其实质就是在query函数执行之前,用filter函数先过滤一次数据 var _queryWithFilter = function(query, filter){ var t = query.term, filtered = { results: [] }, data = []; $(this.data).each2(function(i, datum) { _process(datum, data, filter); }); if (t === "") { query.callback({ results: data }); return; } $(data).each2(function(i, datum) { _process(datum, filtered.results, function(d){ return query.matcher(t, d.text + ""); }) }); query.callback(filtered); } // 根据ID从data数组中获取对应的文本, 主要用于val设置 var _getTextById = function(id, data){ // debugger; var ret; for(var i = 0; i < data.length; i++){ if(data[i].children){ ret = _getTextById(id, data[i].children); if(typeof ret !== "undefined"){ break; } } else { if(data[i].id + "" === id) { ret = data[i].text; break; } } } return ret; } var defaults = { multiple: true, pinyin: true, formatResultCssClass: function(data){ return data.cls; }, formatNoMatches: function(){ return U.lang("S2.NO_MATCHES"); }, formatSelectionTooBig: function 
(limit) { return U.lang("S2.SELECTION_TO_BIG", { count: limit}); }, formatSearching: function () { return U.lang("S2.SEARCHING"); }, formatInputTooShort: function (input, min) { return U.lang("S2.INPUT_TO_SHORT", { count: min - input.length}); }, formatLoadMore: function (pageNumber) { return U.lang("S2.LOADING_MORE"); }, initSelection: function(elem, callback){ var ins = elem.data("select2"), data = ins.opts.data, results; if(ins.opts.multiple) { results = []; $.each(elem.val().split(','), function(index, val){ results.push({id: val, text: _getTextById(val, data)}); }) } else { results = { id: elem.val(), text: _getTextById(elem.val(), data) } } callback(results); } } var select2 = function(option){ if(typeof option !== "string") { option = $.extend({}, defaults, option); // 注意: filter | query | includes | excludes 四个属性是互斥的 // filter基于query, 而includes、excludes基于filter // 优先度 includes > excludes > filter > query // includes是一个数组,指定源数据中有效数据的ID值,将过滤ID不在此数组中的数据 if(option.includes && $.isArray(option.includes)){ option.filter = function(datum){ return $.inArray(datum.id, option.includes) !== -1; } // includes是一个数组,指定源数据中无效数据的ID值,将过滤ID在此数组中的数据 } else if(option.excludes && $.isArray(option.excludes)) { option.filter = function(datum){ return $.inArray(datum.id, option.excludes) === -1; } } // 当有filter属性时,将使用自定义的query方法替代原来的query方法,filter用于从源数据层面上过滤不需要出现的数据 if(option.filter){ option.query = function(query) { _queryWithFilter(query, option.filter); } } // 使用pinyin搜索引擎 if(option.pinyin) { var _customMatcher = option.matcher; option.matcher = function(term){ if(term === ""){ return true; } return Ibos.matchSpell.apply(this, arguments) && (_customMatcher ? _customMatcher.apply(this, arguments) : true); } } // 使用 select 元素时,要去掉一部分默认项 if($(this).is("select")) { delete option.multiple; delete option.initSelection; } return $.fn.select2.call(this, option) } return $.fn.select2.apply(this, arguments) }<|fim▁hole|><|fim▁end|>
return select2; })();
<|file_name|>py25tests.py<|end_file_name|><|fim▁begin|>#-*- coding: ISO-8859-1 -*- # pysqlite2/test/regression.py: pysqlite regression tests # # Copyright (C) 2007 Gerhard Häring <[email protected]> # # This file is part of pysqlite. # # This software is provided 'as-is', without any express or implied # warranty. In no event will the authors be held liable for any damages # arising from the use of this software. # # Permission is granted to anyone to use this software for any purpose, # including commercial applications, and to alter it and redistribute it # freely, subject to the following restrictions: # # 1. The origin of this software must not be misrepresented; you must not # claim that you wrote the original software. If you use this software # in a product, an acknowledgment in the product documentation would be # appreciated but is not required. # 2. Altered source versions must be plainly marked as such, and must not be # misrepresented as being the original software. # 3. This notice may not be removed or altered from any source distribution.<|fim▁hole|> from __future__ import with_statement import unittest import pysqlite2.dbapi2 as sqlite did_rollback = False class MyConnection(sqlite.Connection): def rollback(self): global did_rollback did_rollback = True sqlite.Connection.rollback(self) class ContextTests(unittest.TestCase): def setUp(self): global did_rollback self.con = sqlite.connect(":memory:", factory=MyConnection) self.con.execute("create table test(c unique)") did_rollback = False def tearDown(self): self.con.close() def CheckContextManager(self): """Can the connection be used as a context manager at all?""" with self.con: pass def CheckContextManagerCommit(self): """Is a commit called in the context manager?""" with self.con: self.con.execute("insert into test(c) values ('foo')") self.con.rollback() count = self.con.execute("select count(*) from test").fetchone()[0] self.assertEqual(count, 1) def CheckContextManagerRollback(self): """Is a 
rollback called in the context manager?""" global did_rollback self.assertEqual(did_rollback, False) try: with self.con: self.con.execute("insert into test(c) values (4)") self.con.execute("insert into test(c) values (4)") except sqlite.IntegrityError: pass self.assertEqual(did_rollback, True) def suite(): ctx_suite = unittest.makeSuite(ContextTests, "Check") return unittest.TestSuite((ctx_suite,)) def test(): runner = unittest.TextTestRunner() runner.run(suite()) if __name__ == "__main__": test()<|fim▁end|>
<|file_name|>open-browser-tab.tsx<|end_file_name|><|fim▁begin|>/* * This file is part of CoCalc: Copyright © 2020 Sagemath, Inc. * License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details */ import * as React from "react"; import { defaults } from "smc-util/misc"; import { alert_message } from "../alerts"; interface WindowOpts { menubar?: "yes" | "no"; toolbar?: "yes" | "no"; resizable?: "yes" | "no"; scrollbar?: "yes" | "no"; width?: string; height?: string; } export function open_popup_window(url: string, opts: WindowOpts = {}) { return open_new_tab(url, true, opts); } // open new tab and check if user allows popups. if yes, return the tab -- otherwise show an alert and return null export function open_new_tab( url: string, popup: boolean = false, opts: WindowOpts = {} ) { // if popup=true, it opens a smaller overlay window instead of a new tab (though depends on browser) let tab; opts = defaults(opts, {<|fim▁hole|> menubar: "yes", toolbar: "no", resizable: "yes", scrollbars: "yes", width: "800", height: "640", }); if (popup) { const x: string[] = []; for (const k in opts) { const v = opts[k]; if (v != null) { x.push(`${k}=${v}`); } } const popup_opts = x.join(","); tab = window.open("", "_blank", popup_opts); } else { tab = window.open("", "_blank"); } if (tab == null || tab.closed == null || tab.closed) { // either tab isn't even defined (or doesn't have closed attribute) -- or already closed: then popup blocked let message; if (url) { message = ( <span> Either enable popups for this website or{" "} <a href={url} target="_blank"> click here. </a> </span> ); } else { message = "Enable popups for this website and try again."; } alert_message({ title: "Popups blocked.", message, type: "info", timeout: 15, }); return null; } // equivalent to rel=noopener, i.e. neither tabs know about each other via window.opener // credits: https://stackoverflow.com/a/49276673/54236 tab.opener = null; // only *after* the above, we set the URL! 
tab.location = url; return tab; }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017-2019 Cloudify Platform Ltd. All rights reserved #<|fim▁hole|># # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .operations import upload_blueprint # NOQA from .operations import delete # NOQA from .operations import create # NOQA from .operations import execute_start # NOQA from .operations import refresh # NOQA<|fim▁end|>
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at
<|file_name|>plugin_npapi.cc<|end_file_name|><|fim▁begin|>/* * Copyright 2008 The Native Client Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can * be found in the LICENSE file. */ #include "native_client/src/trusted/plugin/npapi/plugin_npapi.h" #include <stdio.h> #include <string.h> #include <limits> #include <set> #include "native_client/src/include/checked_cast.h" #include "native_client/src/include/nacl_macros.h" #include "native_client/src/include/nacl_string.h" #include "native_client/src/include/portability.h" #include "native_client/src/shared/npruntime/npmodule.h" #include "native_client/src/trusted/desc/nacl_desc_wrapper.h" #include "native_client/src/trusted/handle_pass/browser_handle.h" #include "native_client/src/trusted/plugin/browser_interface.h" #include "native_client/src/trusted/plugin/npapi/async_receive.h" #include "native_client/src/trusted/plugin/npapi/browser_impl_npapi.h" #include "native_client/src/trusted/plugin/npapi/closure.h" #include "native_client/src/trusted/plugin/npapi/multimedia_socket.h" #include "native_client/src/trusted/plugin/npapi/scriptable_impl_npapi.h" #include "native_client/src/trusted/plugin/npapi/video.h" #include "native_client/src/trusted/plugin/origin.h" #include "native_client/src/trusted/plugin/plugin.h" #include "native_client/src/trusted/plugin/scriptable_handle.h" #include "native_client/src/trusted/plugin/stream_shm_buffer.h" #include "native_client/src/trusted/plugin/utility.h" namespace { static bool identifiers_initialized = false; void InitializeIdentifiers() { if (identifiers_initialized) { return; } plugin::PluginNpapi::kHrefIdent = NPN_GetStringIdentifier("href"); plugin::PluginNpapi::kLengthIdent = NPN_GetStringIdentifier("length"); plugin::PluginNpapi::kLocationIdent = NPN_GetStringIdentifier("location"); identifiers_initialized = true; } // TODO(polina): is there a way to share this with PluginPPAPI? 
bool UrlAsNaClDesc(void* obj, plugin::SrpcParams* params) { NaClSrpcArg** ins = params->ins(); PLUGIN_PRINTF(("UrlAsNaClDesc (obj=%p, url=%s, callback=%p)\n", obj, ins[0]->arrays.str, ins[1]->arrays.oval)); plugin::Plugin* plugin = reinterpret_cast<plugin::Plugin*>(obj); const char* url = ins[0]->arrays.str; NPObject* callback_obj = reinterpret_cast<NPObject*>(ins[1]->arrays.oval); plugin::UrlAsNaClDescNotify* callback = new(std::nothrow) plugin::UrlAsNaClDescNotify(plugin, url, callback_obj); if (NULL == callback) { params->set_exception_string("out of memory in __urlAsNaClDesc"); return false; } if (!callback->StartDownload()) { PLUGIN_PRINTF(("UrlAsNaClDesc (failed to load url to local file)\n")); params->set_exception_string("specified url could not be loaded"); // callback is always deleted in URLNotify return false; } return true; } } // namespace namespace plugin { NPIdentifier PluginNpapi::kHrefIdent; NPIdentifier PluginNpapi::kLengthIdent; NPIdentifier PluginNpapi::kLocationIdent; PluginNpapi* PluginNpapi::New(NPP npp, int argc, char* argn[], char* argv[]) { PLUGIN_PRINTF(("PluginNpapi::New (npp=%p, argc=%d)\n", static_cast<void*>(npp), argc)); #if NACL_WINDOWS && !defined(NACL_STANDALONE) if (!NaClHandlePassBrowserCtor()) { return NULL; } #endif InitializeIdentifiers(); // TODO(sehr): use scoped_ptr for proper delete semantics. BrowserInterface* browser_interface = static_cast<BrowserInterface*>(new(std::nothrow) BrowserImplNpapi); if (browser_interface == NULL) { return NULL; } PluginNpapi* plugin = new(std::nothrow) PluginNpapi(); InstanceIdentifier instance_id = NPPToInstanceIdentifier(npp); if (plugin == NULL || !plugin->Init(browser_interface, instance_id, argc, argn, argv)) { PLUGIN_PRINTF(("PluginNpapi::New: Init failed\n")); return NULL; } // Add methods only implemented by the NPAPI plugin. 
plugin->AddMethodCall(UrlAsNaClDesc, "__urlAsNaClDesc", "so", ""); plugin->AddMethodCall(SetAsyncCallback, "__setAsyncCallback", "o", ""); // Set up the multimedia video support. plugin->video_ = new(std::nothrow) VideoMap(plugin); if (NULL == plugin->video_) { return false; } // Create the browser scriptable handle for plugin. ScriptableHandle* handle = browser_interface->NewScriptableHandle(plugin); PLUGIN_PRINTF(("PluginNpapi::New (scriptable_handle=%p)\n", static_cast<void*>(handle))); if (NULL == handle) { return NULL; } plugin->set_scriptable_handle(handle); PLUGIN_PRINTF(("PluginNpapi::New (return %p)\n", static_cast<void*>(plugin))); return plugin; } PluginNpapi::~PluginNpapi() { #if NACL_WINDOWS && !defined(NACL_STANDALONE) NaClHandlePassBrowserDtor(); #endif PLUGIN_PRINTF(("PluginNpapi::~PluginNpapi (this=%p)\n", static_cast<void* >(this))); // Delete the NPModule for this plugin. if (NULL != module_) { delete module_; } /* SCOPE */ { VideoScopedGlobalLock video_lock; PLUGIN_PRINTF(("Plugin::~Plugin deleting video_\n")); if (NULL != video_) { delete video_; video_ = NULL; } } } NPError PluginNpapi::Destroy(NPSavedData** save) { PLUGIN_PRINTF(("PluginNpapi::Destroy (this=%p, save=%p)\n", static_cast<void*>(this), static_cast<void*>(save))); ShutDownSubprocess(); // This should be done after terminating the sel_ldr subprocess so // that we can be sure we will not block forever when waiting for // the upcall thread to exit. delete module_; module_ = NULL; // This has the indirect effect of doing "delete this". PLUGIN_PRINTF(("PluginNpapi::Destroy (this=%p, scriptable_handle=%p)\n", static_cast<void*>(this), static_cast<void*>(scriptable_handle()))); scriptable_handle()->Unref(); return NPERR_NO_ERROR; } // SetWindow is called by the browser as part of the NPAPI interface for // setting up a plugin that has the ability to draw into a window. 
It is // passed a semi-custom window descriptor (some is platform-neutral, some not) // as documented in the NPAPI documentation. NPError PluginNpapi::SetWindow(NPWindow* window) { NPError ret = NPERR_GENERIC_ERROR; PLUGIN_PRINTF(("PluginNpapi::SetWindow(%p, %p)\n", static_cast<void* >(this), static_cast<void*>(window))); // NOTE(gregoryd): Chrome does not allow us to call NPN_GetUrlNotify during // initialization, but does call SetWindows afterwards, so we use this call // to trigger the download if the src property hasn't been specified. #if !defined(NACL_STANDALONE) // If the <embed src='...'> attr was defined, the browser would have // implicitly called GET on it, which calls LoadNaClModule() and // set_nacl_module_url(). // In the absence of this attr, we use the "nexes" attribute if present. if (nacl_module_url() == NACL_NO_URL) { const char* nexes_attr = LookupArgument("nexes"); if (nexes_attr != NULL) { SetNexesPropertyImpl(nexes_attr); } } #endif if (NULL == module_) { if (video() && video()->SetWindow(window)) { ret = NPERR_NO_ERROR; } return ret; } else { // Send NPP_SetWindow to NPModule. NPP npp = InstanceIdentifierToNPP(instance_id()); return module_->SetWindow(npp, window); } } NPError PluginNpapi::GetValue(NPPVariable variable, void* value) { const char** stringp = static_cast<const char**>(value); PLUGIN_PRINTF(("PluginNpapi::GetValue(%p, %d)\n", static_cast<void*>(this), variable)); switch (variable) { case NPPVpluginNameString: *stringp = "NativeClient Simple RPC + multimedia a/v interface"; return NPERR_NO_ERROR; case NPPVpluginDescriptionString: *stringp = "NativeClient Simple RPC interaction w/ multimedia."; return NPERR_NO_ERROR; case NPPVpluginScriptableNPObject: // Anyone requesting access to the scriptable instance is given shared // ownership of the scriptable handle. 
*(static_cast<NPObject**>(value)) = static_cast<ScriptableImplNpapi*>(scriptable_handle()->AddRef()); return NPERR_NO_ERROR; case NPPVpluginWindowBool: case NPPVpluginTransparentBool: case NPPVjavaClass: case NPPVpluginWindowSize: case NPPVpluginTimerInterval: case NPPVpluginScriptableInstance: case NPPVpluginScriptableIID: case NPPVjavascriptPushCallerBool: case NPPVpluginKeepLibraryInMemory: case NPPVpluginNativeAccessibleAtkPlugId: case NPPVpluginNeedsXEmbed: case NPPVformValue: case NPPVpluginUrlRequestsDisplayedBool: case NPPVpluginWantsAllNetworkStreams: case NPPVpluginCancelSrcStream: case NPPVSupportsAdvancedKeyHandling: #ifdef XP_MACOSX // Mac has several drawing, event, etc. models in NPAPI that are unique. case NPPVpluginDrawingModel: case NPPVpluginEventModel: case NPPVpluginCoreAnimationLayer: #endif // XP_MACOSX default: return NPERR_INVALID_PARAM; } } int16_t PluginNpapi::HandleEvent(void* param) { int16_t ret; PLUGIN_PRINTF(("PluginNpapi::HandleEvent(%p, %p)\n", static_cast<void*>(this), static_cast<void*>(param))); if (NULL == module_) { if (video()) { ret = video()->HandleEvent(param); } else { ret = 0; } } else { NPP npp = InstanceIdentifierToNPP(instance_id()); return module_->HandleEvent(npp, param); } return ret; } // Downloading resources can be caused implicitly (by the browser in response // to src= in the embed/object tag) or explicitly (by calls to NPN_GetURL or // NPN_GetURLNotify). Implicit downloading is happening whenever // notifyData==NULL, and always results in calling Load on the Plugin object. // Explicit downloads place a pointer to a Closure object in notifyData. How // these closures are manipulated depends on which browser we are running // within. If we are in Chrome (NACL_STANDALONE is not defined): // - NewStream creates a StreamShmBuffer object and attaches that to // stream->pdata and the buffer member of the closure (if there was one). // - WriteReady and Write populate the buffer object. 
// - DestroyStream signals the end of WriteReady/Write processing. If the // reason is NPRES_DONE, then the closure's Run method is invoked and // the closure is deleted. If there is no closure, Load is called. // If we are not in Chrome (NACL_STANDALONE is defined): // - NewStream returns NP_ASFILEONLY, which causes StreamAsFile to be invoked. // - StreamAsFile indicates that the browser has fully downloaded the resource // and placed it in the local file system. This causes the closure's Run // method to be invoked. If there is no closure, Load is called. // In both cases, URLNotify is used to report any errors. NPError PluginNpapi::NewStream(NPMIMEType type, NPStream* stream, NPBool seekable, uint16_t* stype) { PLUGIN_PRINTF(("PluginNpapi::NewStream(%p, %s, %p, %d)\n", static_cast<void*>(this), type, static_cast<void*>(stream), seekable)); #ifdef NACL_STANDALONE *stype = NP_ASFILEONLY; #else // When running as a built-in plugin in Chrome we cannot access the // file system, therefore we use normal streams to get the data. *stype = NP_NORMAL; // Stream pdata should not be set until the stream is created. if (NULL != stream->pdata) { return NPERR_GENERIC_ERROR; } // StreamShmBuffer is used to download large files in chunks in Chrome. StreamShmBuffer* stream_buffer = new(std::nothrow) StreamShmBuffer(); // Remember the stream buffer on the stream. stream->pdata = reinterpret_cast<void*>(stream_buffer); // Other than the default "src=" download, there should have been a // closure attached to the stream by NPN_GetURLNotify. 
Closure* closure = static_cast<Closure*>(stream->notifyData); if (NULL != closure) { closure->set_buffer(stream_buffer); } #endif return NPERR_NO_ERROR; } int32_t PluginNpapi::WriteReady(NPStream* stream) { if (NULL == stream) { return -1; } return 32 * 1024; } int32_t PluginNpapi::Write(NPStream* stream, int32_t offset, int32_t len, void* buf) { if (NULL == stream) { return -1; } StreamShmBuffer* stream_buffer = reinterpret_cast<StreamShmBuffer*>(stream->pdata); // Should have been set during call to NewStream. if (NULL == stream_buffer) { return -1; } return stream_buffer->write(offset, len, buf); } void PluginNpapi::StreamAsFile(NPStream* stream, const char* fname) { PLUGIN_PRINTF(("PluginNpapi::StreamAsFile(%p, %p, %s)\n", static_cast<void*>(this), static_cast<void*>(stream), fname)); // The stream should be valid until the destroy call is complete. // Furthermore, a valid filename should have been passed. if (NULL == fname || NULL == stream) { PLUGIN_PRINTF(("StreamAsFile: FAILED: fname or stream was NULL.\n")); return; } // When StreamAsFile is called a file for the stream is presented to the // plugin. This only happens outside of Chrome. So this handler calls the // appropriate load method to transfer the requested resource to the sel_ldr // instance. if (NULL == stream->notifyData) { // If there was no closure, there was no explicit plugin call to // NPN_GetURL{Notify}. Hence this resource was downloaded by default, // typically through src=... in the embed/object tag. PLUGIN_PRINTF(("StreamAsFile: default run\n")); LoadNaClModule(stream->url, fname); } else { // Otherwise, we invoke the Run on the closure that was set up by // the requestor. 
Closure* closure = static_cast<Closure*>(stream->notifyData); closure->RunFromFile(stream, fname); } } NPError PluginNpapi::DestroyStream(NPStream* stream, NPReason reason) { PLUGIN_PRINTF(("PluginNpapi::DestroyStream(%p, %p, %d)\n", static_cast<void*>(this), static_cast<void*>(stream), reason)); // DestroyStream is called whenever a request for a resource either succeeds // or fails. If the request succeeded, we would already have called // StreamAsFile (for non-Chrome browsers, which already invoked the Run // method on the closure), or would have done all the Writes (for Chrome, // and we still need to invoke the Run method). // The stream should be valid until the destroy call is complete. if (NULL == stream || NULL == stream->url) { return NPERR_GENERIC_ERROR; } // Defer error handling to URLNotify. if (NPRES_DONE != reason) { return NPERR_NO_ERROR; } if (NULL == stream->notifyData) { // Here we handle only the default, src=... streams (statically obtained) // Stream download completed so start the nexe load into the service // runtime. PLUGIN_PRINTF(("DestroyStream: default run\n")); StreamShmBuffer* stream_buffer = reinterpret_cast<StreamShmBuffer*>(stream->pdata); // We are running outside of Chrome, so StreamAsFile does the load. if (NULL == stream_buffer) { return NPERR_NO_ERROR; } // Note, we cannot access the HTTP status code, so we might have // been returned a 404 error page. This is reported in the ELF // validity checks that Load precipitates. LoadNaClModule(stream->url, stream_buffer); delete(stream_buffer); stream->pdata = NULL; } else { // Otherwise there was a closure. Closure* closure = static_cast<Closure*>(stream->notifyData); StreamShmBuffer* stream_buffer = closure->buffer(); if (NULL != stream_buffer) { // There was a buffer attached, so we are in Chrome. Invoke its Run. // If we are not in Chrome, Run was invoked by StreamAsFile. 
closure->RunFromBuffer(stream->url, stream_buffer); delete stream_buffer; } delete closure; stream->notifyData = NULL; } return NPERR_NO_ERROR; } void PluginNpapi::URLNotify(const char* url, NPReason reason, void* notifyData) { PLUGIN_PRINTF(("PluginNpapi::URLNotify(%p, %s, %d, %p)\n", static_cast<void*>(this), url, reason, notifyData)); // The url should always be non-NULL. if (NULL == url) { PLUGIN_PRINTF(("URLNotify: FAILED: url was NULL.\n")); return; } // If we succeeded, there is nothing to do. if (NPRES_DONE == reason) { return; } // If the request failed, we need to report the failure. PLUGIN_PRINTF(("URLNotify: Unable to open: '%s' reason=%d\n", url, reason)); if (NULL == notifyData) { // The implicit download failed, run the embed/object's onfail= handler. RunOnfailHandler(); } else { // Convert the reason to a string and abuse the closure's Run method // slightly by passing that the reason as the file name. Closure* closure = static_cast<Closure*>(notifyData); nacl::stringstream msg; msg << "reason: " << reason; closure->RunFromFile(static_cast<NPStream*>(NULL), msg.str()); delete closure; } } void PluginNpapi::set_module(nacl::NPModule* module) { PLUGIN_PRINTF(("PluginNpapi::set_module(%p, %p)\n", static_cast<void*>(this), static_cast<void*>(module))); delete module_; module_ = module; if (NULL != module_) { // Set the origins. module_->set_nacl_module_origin(nacl_module_origin()); module_->set_origin(origin()); // Initialize the NaCl module's NPAPI interface. // This should only be done for the first instance in a given group. module_->Initialize(); // Create a new instance of that group. const char mime_type[] = "application/nacl-npapi-over-srpc"; NPP npp = InstanceIdentifierToNPP(instance_id()); NPError err = module->New(const_cast<char*>(mime_type), npp, argc(), argn(), argv()); // Remember the scriptable version of the NaCl instance. 
err = module_->GetValue(npp, NPPVpluginScriptableNPObject, reinterpret_cast<void*>(&nacl_instance_)); // Send an initial NPP_SetWindow to the plugin. NPWindow window; window.height = height(); window.width = width(); module->SetWindow(npp, &window); } } bool PluginNpapi::InitializeModuleMultimedia(ScriptableHandle* raw_channel, ServiceRuntime* service_runtime) { PLUGIN_PRINTF(("PluginNpapi::InitializeModuleMultimedia\n")); video_->Enable(); multimedia_channel_ = new(std::nothrow) MultimediaSocket(browser_interface(), service_runtime); if (NULL == multimedia_channel_) { PLUGIN_PRINTF(("PluginNpapi::InitializeModuleMultimedia: " "MultimediaSocket channel construction failed.\n")); return false; } // Initialize the multimedia system. if (!multimedia_channel_->InitializeModuleMultimedia( this, raw_channel->handle())) { PLUGIN_PRINTF(("PluginNpapi::InitializeModuleMultimedia: " "InitializeModuleMultimedia failed.\n")); delete multimedia_channel_; multimedia_channel_ = NULL; return false; } return true; } void PluginNpapi::ShutdownMultimedia() { PLUGIN_PRINTF(("PluginNpapi::ShutdownMultimedia (this=%p)\n", static_cast<void*>(this))); delete multimedia_channel_; multimedia_channel_ = NULL; } void PluginNpapi::StartProxiedExecution(NaClSrpcChannel* srpc_channel) { // Check that the .nexe exports the NPAPI initialization method. NaClSrpcService* client_service = srpc_channel->client; if (NaClSrpcServiceMethodIndex(client_service, "NP_Initialize:ih:i") == kNaClSrpcInvalidMethodIndex) { return; } nacl::NPModule* npmodule = new(std::nothrow) nacl::NPModule(srpc_channel); if (NULL != npmodule) { set_module(npmodule); } } bool PluginNpapi::RequestNaClModule(const nacl::string& url) { // Load the new module if the origin of the page is valid. 
PLUGIN_PRINTF(("Plugin::SetProperty src = '%s'\n", url.c_str())); LoadNaClAppNotify* callback = new(std::nothrow) LoadNaClAppNotify(this, url); if ((NULL == callback) || (!callback->StartDownload())) { PLUGIN_PRINTF(("Failed to load URL to local file.\n")); // callback is always deleted in URLNotify return false; } return true; } bool PluginNpapi::SetAsyncCallback(void* obj, SrpcParams* params) {<|fim▁hole|> PluginNpapi* plugin = static_cast<PluginNpapi*>(reinterpret_cast<Plugin*>(obj)); if (plugin->service_runtime_ == NULL) { params->set_exception_string("No subprocess running"); return false; } if (plugin->receive_thread_running_) { params->set_exception_string("A callback has already been registered"); return false; } ReceiveThreadArgs* args = new(std::nothrow) ReceiveThreadArgs; if (args == NULL) { params->set_exception_string("Memory allocation failed"); return false; } args->plugin = InstanceIdentifierToNPP(plugin->instance_id()); args->callback = reinterpret_cast<NPObject*>(params->ins()[0]->arrays.oval); NPN_RetainObject(args->callback); nacl::DescWrapper* socket = plugin->service_runtime_->async_receive_desc(); NaClDescRef(socket->desc()); args->socket = plugin->wrapper_factory()->MakeGeneric(socket->desc()); // It would be nice if the thread interface did not require us to // specify a stack size. This is fairly arbitrary. size_t stack_size = 128 << 10; NaClThreadCreateJoinable(&plugin->receive_thread_, AsyncReceiveThread, args, stack_size); plugin->receive_thread_running_ = true; return true; } } // namespace plugin<|fim▁end|>
<|file_name|>read.js<|end_file_name|><|fim▁begin|>var AIO = require('../../index'); // create an instance aio = AIO(process.env.AIO_KEY || 'xxxxxxxxxxxx'); // get a list of all groups aio.groups(function(err, data) { if(err) { return console.error(err);<|fim▁hole|> }); // get a specific group by name aio.groups('Test', function(err, data) { if(err) { return console.error(err); } // log data object console.log(data); });<|fim▁end|>
} // log data array console.log(data);
<|file_name|>vector.rs<|end_file_name|><|fim▁begin|>//! Implementation of the **Vector** types. //! //! **Note:** Much of the code in this module is inspired by or copied directly from the `cgmath` //! crate. Originally we used the `cgmath` types directly, however we decided to switch to our own //! implementations in order to gain some flexibility. use crate::geom::scalar; use crate::math::{self, BaseFloat, Bounded, InnerSpace, NumCast, One, Zero}; use crate::rand::distributions::{Distribution, Standard}; use crate::rand::Rng; use crate::serde_derive::{Deserialize, Serialize}; use std::{iter, ops}; /// A 2-dimensional vector. #[repr(C)] #[derive(Default, Debug, PartialEq, Eq, Copy, Clone, Hash, Serialize, Deserialize)] pub struct Vector2<S = scalar::Default> { pub x: S, pub y: S, } /// A 3-dimensional vector. #[repr(C)] #[derive(Default, Debug, PartialEq, Eq, Copy, Clone, Hash, Serialize, Deserialize)] pub struct Vector3<S = scalar::Default> { pub x: S, pub y: S, pub z: S, } /// A 4-dimensional vector. #[repr(C)] #[derive(Default, Debug, PartialEq, Eq, Copy, Clone, Hash, Serialize, Deserialize)] pub struct Vector4<S = scalar::Default> { pub x: S, pub y: S, pub z: S, pub w: S, } // Generates index operators for a compound type // // Code originally from `cgmath` macros module. macro_rules! impl_index_operators { ($VectorN:ident < $S:ident > , $n:expr, $Output:ty, $I:ty) => { impl<$S> ::std::ops::Index<$I> for $VectorN<$S> { type Output = $Output; #[inline] fn index<'a>(&'a self, i: $I) -> &'a $Output { let v: &[$S; $n] = self.as_ref(); &v[i] } } impl<$S> ::std::ops::IndexMut<$I> for $VectorN<$S> { #[inline] fn index_mut<'a>(&'a mut self, i: $I) -> &'a mut $Output { let v: &mut [$S; $n] = self.as_mut(); &mut v[i] } } }; } // Utility macro for generating associated functions for the vectors macro_rules! impl_vector { ($VectorN:ident { $($field:ident),+ }, $n:expr, $constructor:ident) => { impl<S> $VectorN<S> { /// Construct a new vector, using the provided values. 
#[inline] pub fn new($($field: S),+) -> $VectorN<S> { $VectorN { $($field: $field),+ } } /// Construct a vector using the given value for each field. #[inline] pub fn from_value(scalar: S) -> $VectorN<S> where S: Clone, { $VectorN { $($field: scalar.clone()),+ } } /// The number of dimensions in the vector. #[inline] pub fn len(&self) -> usize { $n } /// Perform the given operation on each field in the vector, returning a new vector /// constructed from the operations. #[inline] pub fn map<U, F>(self, mut f: F) -> $VectorN<U> where F: FnMut(S) -> U, { $VectorN { $($field: f(self.$field)),+ } } /// Perform the given operation on each each field on both vectors, returning a new /// vector constructed from the operations. #[inline] pub fn zip_map<T, U, F>(self, other: $VectorN<T>, mut f: F) -> $VectorN<U> where F: FnMut(S, T) -> U, { $VectorN { $($field: f(self.$field, other.$field)),+ } } /// Test whether or not the vector is infinite. pub fn is_finite(&self) -> bool where S: BaseFloat, { $(self.$field.is_finite())&&+ } /// Component-wise casting to another type. #[inline] pub fn cast<T>(&self) -> Option<$VectorN<T>> where S: NumCast + Clone, T: NumCast, { $( let $field = match NumCast::from(self.$field.clone()) { Some(field) => field, None => return None }; )+ Some($VectorN { $($field),+ }) } /// A zeroed vector. #[inline] pub fn zero() -> $VectorN<S> where S: Zero, { $VectorN { $($field: S::zero()),+ } } /// Whether or not the vector is zeroed. #[inline] pub fn is_zero(&self) -> bool where S: PartialEq + Zero, { *self == $VectorN::zero() } /// A vector with `1` for each element. #[inline] pub fn one() -> $VectorN<S> where S: One, { $VectorN { $($field: S::one()),+ } } /// Whether or not each element in the vector is equal to `1`. #[inline] pub fn is_one(&self) -> bool where S: PartialEq + One, { *self == $VectorN::one() } /// Tests whether or not any of the vector's elements is `NaN`. 
#[inline] pub fn is_nan(&self) -> bool where S: BaseFloat, { $(self.$field.is_nan())||+ } /// Sum the fields of the vector. #[inline] pub fn sum(self) -> S where S: ops::Add<Output = S> + Copy, { math::Array::sum(self) } /// The product of the fields of the vector. #[inline] pub fn product(self) -> S where S: ops::Mul<Output = S> + Copy, { math::Array::product(self) } /// Return a vector whose magnitude is limited to the given value. #[inline] pub fn limit_magnitude(self, limit: S) -> Self where S: BaseFloat, { limit_magnitude(self, limit) } /// Return a vector with the given magnitude. #[inline] pub fn with_magnitude(self, magnitude: S) -> Self where S: BaseFloat, { self.normalize() * magnitude } /// Return a normalized vector. /// /// If `self` `is_zero`, this returns `self`. pub fn normalize(self) -> Self where S: BaseFloat, { if self.is_zero() { self } else { InnerSpace::normalize(self) } } /// The magnitude of the vector. /// /// The magnitude represents the distance from the origin to the point described by the /// vector. /// /// Note: This is equivalent to `.magnitude2().sqrt()`. As a result, it can be quite a /// bit more computationally efficient to use `.magnitude2()` directly when feasible. /// /// ## Example /// /// ``` /// # use nannou::prelude::*; /// # fn main() { /// let a = vec2(5.0, 0.0); /// let b = vec2(0.0, 5.0); /// assert_eq!(a.magnitude(), 5.0); /// assert_eq!(b.magnitude(), 5.0); /// # } /// /// ``` pub fn magnitude(self) -> S where S: BaseFloat, { InnerSpace::magnitude(self) } /// The square of the magnitude. /// /// See the `magnitude` docs for details. pub fn magnitude2(self) -> S where S: BaseFloat, { InnerSpace::magnitude2(self) } /// The dot product of self and the given vector. 
#[inline] pub fn dot(self, other: $VectorN<S>) -> S where S: BaseFloat, { InnerSpace::dot(self, other) } } impl<S> iter::Sum<$VectorN<S>> for $VectorN<S> where S: Zero + ops::Add<Output = S>, { #[inline] fn sum<I>(iter: I) -> $VectorN<S> where I: Iterator<Item = $VectorN<S>>, { iter.fold($VectorN::zero(), ops::Add::add) } } impl<'a, S: 'a> iter::Sum<&'a $VectorN<S>> for $VectorN<S> where S: 'a + Clone + Zero + ops::Add<Output = S>, { #[inline] fn sum<I>(iter: I) -> $VectorN<S> where I: Iterator<Item=&'a $VectorN<S>>, { iter.fold($VectorN::zero(), |acc, s| acc + s.clone())// ops::Add::add) } } // std::ops - vector vector impl<S> ops::Neg for $VectorN<S> where S: ops::Neg<Output = S>, { type Output = $VectorN<S>; #[inline] fn neg(self) -> $VectorN<S> { self.map(|s| -s) } } impl<S> ops::Add for $VectorN<S> where S: ops::Add<Output = S>, { type Output = $VectorN<S>; #[inline] fn add(self, other: Self) -> Self { self.zip_map(other, |a, b| a + b) } } impl<S> ops::Sub for $VectorN<S> where S: ops::Sub<Output = S>, { type Output = $VectorN<S>; #[inline] fn sub(self, other: Self) -> Self { self.zip_map(other, |a, b| a - b) } } impl<S> ops::Mul for $VectorN<S> where S: ops::Mul<Output = S>, { type Output = $VectorN<S>; #[inline] fn mul(self, other: Self) -> Self { self.zip_map(other, |a, b| a * b) } } impl<S> ops::Div for $VectorN<S> where S: ops::Div<Output = S>, { type Output = $VectorN<S>; #[inline] fn div(self, other: Self) -> Self { self.zip_map(other, |a, b| a / b) } } impl<S> ops::Rem for $VectorN<S> where S: ops::Rem<Output = S>, { type Output = $VectorN<S>; #[inline] fn rem(self, other: Self) -> Self { self.zip_map(other, |a, b| a % b) } } impl<S> ops::AddAssign for $VectorN<S> where S: ops::AddAssign, { fn add_assign(&mut self, other: Self) { $(self.$field += other.$field;)+ } } impl<S> ops::SubAssign for $VectorN<S> where S: ops::SubAssign, { fn sub_assign(&mut self, other: Self) { $(self.$field -= other.$field;)+ } } impl<S> ops::DivAssign for $VectorN<S> where 
S: Copy + ops::DivAssign, { #[inline] fn div_assign(&mut self, other: Self) { $(self.$field /= other.$field;)+ } } impl<S> ops::MulAssign for $VectorN<S> where S: Copy + ops::MulAssign, { #[inline] fn mul_assign(&mut self, other: Self) { $(self.$field *= other.$field;)+ } } impl<S> ops::RemAssign for $VectorN<S> where S: Copy + ops::RemAssign, { #[inline] fn rem_assign(&mut self, other: Self) { $(self.$field %= other.$field;)+ } } // std::ops - vector scalar impl<S> ops::Rem<S> for $VectorN<S> where S: Copy + ops::Rem<Output = S>, { type Output = $VectorN<S>; #[inline] fn rem(self, scalar: S) -> Self { self.map(|s| s % scalar) } } impl<S> ops::Div<S> for $VectorN<S> where S: Copy + ops::Div<Output = S>, { type Output = $VectorN<S>; #[inline] fn div(self, scalar: S) -> Self { self.map(|s| s / scalar) } } impl<S> ops::Mul<S> for $VectorN<S> where S: Copy + ops::Mul<Output = S>, { type Output = $VectorN<S>; #[inline] fn mul(self, scalar: S) -> Self { self.map(|s| s * scalar) } } impl<S> ops::RemAssign<S> for $VectorN<S> where S: Copy + ops::RemAssign, { #[inline] fn rem_assign(&mut self, scalar: S) { $(self.$field %= scalar;)+ } } impl<S> ops::DivAssign<S> for $VectorN<S> where S: Copy + ops::DivAssign, { #[inline] fn div_assign(&mut self, scalar: S) { $(self.$field /= scalar;)+ } } impl<S> ops::MulAssign<S> for $VectorN<S> where S: Copy + ops::MulAssign, { #[inline] fn mul_assign(&mut self, scalar: S) { $(self.$field *= scalar;)+ } } // indexing impl_index_operators!($VectorN<S>, $n, S, usize); impl_index_operators!($VectorN<S>, $n, [S], ops::Range<usize>); impl_index_operators!($VectorN<S>, $n, [S], ops::RangeTo<usize>); impl_index_operators!($VectorN<S>, $n, [S], ops::RangeFrom<usize>); impl_index_operators!($VectorN<S>, $n, [S], ops::RangeFull); // conversions impl<S> From<[S; $n]> for $VectorN<S> where S: Copy, { #[inline] fn from(v: [S; $n]) -> Self { let [$($field),+] = v; $VectorN { $($field),+ } } } impl<S> Into<[S; $n]> for $VectorN<S> { #[inline] fn 
into(self) -> [S; $n] { let $VectorN { $($field),+ } = self; [$($field),+] } } impl<S> AsRef<[S; $n]> for $VectorN<S> { #[inline] fn as_ref(&self) -> &[S; $n] { unsafe { let ptr = self as *const _ as *const [S; $n]; &*ptr } } } impl<S> AsMut<[S; $n]> for $VectorN<S> { #[inline] fn as_mut(&mut self) -> &mut [S; $n] { unsafe { let ptr = self as *mut _ as *mut [S; $n]; &mut*ptr } } } impl<S> ops::Deref for $VectorN<S> { type Target = [S; $n]; #[inline] fn deref(&self) -> &Self::Target { self.as_ref() } } impl<S> ops::DerefMut for $VectorN<S> { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut() } } // num-traits impl<S> Bounded for $VectorN<S> where S: Bounded, { #[inline] fn min_value() -> $VectorN<S> { $VectorN { $($field: S::min_value()),+ } } #[inline] fn max_value() -> $VectorN<S> { $VectorN { $($field: S::max_value()),+ } } } impl<S> Zero for $VectorN<S> where S: PartialEq + Zero, { #[inline] fn zero() -> $VectorN<S> { $VectorN { $($field: S::zero()),* } } #[inline] fn is_zero(&self) -> bool { *self == $VectorN::zero() } } // `rand` crate implementations impl<S> Distribution<$VectorN<S>> for Standard where Standard: Distribution<S>, { fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> $VectorN<S> { $VectorN { $($field: rng.gen()),+ } } } /// The short constructor. #[inline] pub fn $constructor<S>($($field: S),+) -> $VectorN<S> { $VectorN::new($($field),+) } }; } mod cgmath_impl { // From `cgmath` macro_rules! 
fold_array { (& $method:ident, { $x:expr }) => { *$x }; (& $method:ident, { $x:expr, $y:expr }) => { $x.$method(&$y) }; (& $method:ident, { $x:expr, $y:expr, $z:expr }) => { $x.$method(&$y).$method(&$z) }; (& $method:ident, { $x:expr, $y:expr, $z:expr, $w:expr }) => { $x.$method(&$y).$method(&$z).$method(&$w) }; ($method:ident, { $x:expr }) => { $x }; ($method:ident, { $x:expr, $y:expr }) => { $x.$method($y) }; ($method:ident, { $x:expr, $y:expr, $z:expr }) => { $x.$method($y).$method($z) }; ($method:ident, { $x:expr, $y:expr, $z:expr, $w:expr }) => { $x.$method($y).$method($z).$method($w) }; } use super::{Vector2, Vector3, Vector4}; use crate::math::cgmath::{ self, Angle, Array, BaseFloat, BaseNum, ElementWise, EuclideanSpace, InnerSpace, MetricSpace, Rad, VectorSpace, }; use crate::math::cgmath::{AbsDiffEq, RelativeEq, UlpsEq}; use std::ops; macro_rules! impl_vector_cgmath { ($VectorN:ident { $($field:ident),+ }, $n:expr) => { impl<S> From<cgmath::$VectorN<S>> for $VectorN<S> { #[inline] fn from(v: cgmath::$VectorN<S>) -> Self { let cgmath::$VectorN { $($field),+ } = v; $VectorN { $($field),+ } } } impl<S> Into<cgmath::$VectorN<S>> for $VectorN<S> { #[inline] fn into(self) -> cgmath::$VectorN<S> { let $VectorN { $($field),+ } = self; cgmath::$VectorN { $($field),+ } } } impl<S> VectorSpace for $VectorN<S> where S: BaseNum, { type Scalar = S; } impl<S> MetricSpace for $VectorN<S> where S: BaseFloat, { type Metric = S; #[inline] fn distance2(self, other: Self) -> S { (other - self).magnitude2() } } impl<S> AbsDiffEq for $VectorN<S> where S: AbsDiffEq, S::Epsilon: Copy, { type Epsilon = S::Epsilon; #[inline] fn default_epsilon() -> S::Epsilon { S::default_epsilon() } #[inline] fn abs_diff_eq( &self, other: &Self, epsilon: Self::Epsilon, ) -> bool { $(self.$field.abs_diff_eq(&other.$field, epsilon))&&+ } } impl<S> RelativeEq for $VectorN<S> where S: RelativeEq, S::Epsilon: Copy, { #[inline] fn default_max_relative() -> S::Epsilon { S::default_max_relative() } 
#[inline] fn relative_eq( &self, other: &Self, epsilon: Self::Epsilon, max_relative: Self::Epsilon, ) -> bool { $(self.$field.relative_eq(&other.$field, epsilon, max_relative))&&+ } } impl<S> UlpsEq for $VectorN<S> where S: UlpsEq, S::Epsilon: Copy, { #[inline] fn default_max_ulps() -> u32 { S::default_max_ulps() } #[inline] fn ulps_eq(&self, other: &Self, epsilon: Self::Epsilon, max_ulps: u32) -> bool { $(self.$field.ulps_eq(&other.$field, epsilon, max_ulps))&&+ } } impl<S> ElementWise<S> for $VectorN<S> where S: BaseNum, { #[inline] fn add_element_wise(self, rhs: S) -> $VectorN<S> { $VectorN::new($(self.$field + rhs),+) } #[inline] fn sub_element_wise(self, rhs: S) -> $VectorN<S> { $VectorN::new($(self.$field - rhs),+) } #[inline] fn mul_element_wise(self, rhs: S) -> $VectorN<S> { $VectorN::new($(self.$field * rhs),+) } #[inline] fn div_element_wise(self, rhs: S) -> $VectorN<S> { $VectorN::new($(self.$field / rhs),+) } #[inline] fn rem_element_wise(self, rhs: S) -> $VectorN<S> { $VectorN::new($(self.$field % rhs),+) } #[inline] fn add_assign_element_wise(&mut self, rhs: S) { $(self.$field += rhs);+ } #[inline] fn sub_assign_element_wise(&mut self, rhs: S) { $(self.$field -= rhs);+ } #[inline] fn mul_assign_element_wise(&mut self, rhs: S) { $(self.$field *= rhs);+ } #[inline] fn div_assign_element_wise(&mut self, rhs: S) { $(self.$field /= rhs);+ } #[inline] fn rem_assign_element_wise(&mut self, rhs: S) { $(self.$field %= rhs);+ } } impl<S> ElementWise for $VectorN<S> where S: BaseFloat, { #[inline] fn add_element_wise(self, rhs: $VectorN<S>) -> $VectorN<S> { $VectorN::new($(self.$field + rhs.$field),+) } #[inline] fn sub_element_wise(self, rhs: $VectorN<S>) -> $VectorN<S> { $VectorN::new($(self.$field - rhs.$field),+) } #[inline] fn mul_element_wise(self, rhs: $VectorN<S>) -> $VectorN<S> { $VectorN::new($(self.$field * rhs.$field),+) } #[inline] fn div_element_wise(self, rhs: $VectorN<S>) -> $VectorN<S> { $VectorN::new($(self.$field / rhs.$field),+) } #[inline] 
fn rem_element_wise(self, rhs: $VectorN<S>) -> $VectorN<S> { $VectorN::new($(self.$field % rhs.$field),+) } #[inline] fn add_assign_element_wise(&mut self, rhs: $VectorN<S>) { $(self.$field += rhs.$field);+ } #[inline] fn sub_assign_element_wise(&mut self, rhs: $VectorN<S>) { $(self.$field -= rhs.$field);+ } #[inline] fn mul_assign_element_wise(&mut self, rhs: $VectorN<S>) { $(self.$field *= rhs.$field);+ } #[inline] fn div_assign_element_wise(&mut self, rhs: $VectorN<S>) { $(self.$field /= rhs.$field);+ } #[inline] fn rem_assign_element_wise(&mut self, rhs: $VectorN<S>) { $(self.$field %= rhs.$field);+ } } impl<S> Array for $VectorN<S> where S: Copy, { type Element = S; #[inline] fn len() -> usize { $n } #[inline] fn from_value(scalar: S) -> $VectorN<S> { $VectorN { $($field: scalar),+ } } #[inline] fn sum(self) -> S where S: ops::Add<Output = S>, { fold_array!(add, { $(self.$field),+ }) } #[inline] fn product(self) -> S where S: ops::Mul<Output = S>, { fold_array!(mul, { $(self.$field),+ }) } #[inline] fn is_finite(&self) -> bool where S: BaseFloat, { $(self.$field.is_finite())&&+ } } impl<S> EuclideanSpace for $VectorN<S> where S: BaseNum, { type Scalar = S; type Diff = $VectorN<S>; #[inline] fn origin() -> Self { $VectorN { $($field: S::zero()),+ } } #[inline] fn from_vec(v: $VectorN<S>) -> Self { $VectorN::new($(v.$field),+) } #[inline] fn to_vec(self) -> $VectorN<S> { $VectorN::new($(self.$field),+) } #[inline] fn dot(self, other: $VectorN<S>) -> S { $VectorN::new($(self.$field * other.$field),+).sum() } } } } // A macro to simplify the implementation of the point conversion traits. macro_rules! 
impl_point_conversions { ($VectorN:ident { $($field:ident),+ }, $PointN:ident) => { impl<S> From<cgmath::$PointN<S>> for $VectorN<S> { #[inline] fn from(v: cgmath::$PointN<S>) -> Self { let cgmath::$PointN { $($field),+ } = v; $VectorN { $($field),+ } } } impl<S> Into<cgmath::$PointN<S>> for $VectorN<S> { #[inline] fn into(self) -> cgmath::$PointN<S> { let $VectorN { $($field),+ } = self; cgmath::$PointN { $($field),+ } } } }; } impl_vector_cgmath!(Vector2 { x, y }, 2); impl_vector_cgmath!(Vector3 { x, y, z }, 3); impl_vector_cgmath!(Vector4 { x, y, z, w }, 4); impl_point_conversions!(Vector2 { x, y }, Point2); impl_point_conversions!(Vector3 { x, y, z }, Point3); impl<S> InnerSpace for Vector2<S> where S: BaseFloat, { #[inline] fn dot(self, other: Vector2<S>) -> S { Vector2::mul_element_wise(self, other).sum() } #[inline] fn angle(self, other: Vector2<S>) -> Rad<S> { Rad::atan2(Self::perp_dot(self, other), Self::dot(self, other)) } } impl<S> InnerSpace for Vector3<S> where S: BaseFloat, { #[inline] fn dot(self, other: Vector3<S>) -> S { Vector3::mul_element_wise(self, other).sum() } #[inline] fn angle(self, other: Vector3<S>) -> Rad<S> { Rad::atan2(self.cross(other).magnitude(), Self::dot(self, other)) } } impl<S> InnerSpace for Vector4<S> where S: BaseFloat, { #[inline] fn dot(self, other: Vector4<S>) -> S { Vector4::mul_element_wise(self, other).sum() } } } mod lyon_impl { use super::{Vector2, Vector3, Vector4}; use crate::math::Zero; impl<S> From<lyon::math::Point> for Vector2<S> where S: From<f32>, { fn from(p: lyon::math::Point) -> Self { (S::from(p.x), S::from(p.y)).into() } } impl<S> From<lyon::math::Point> for Vector3<S> where S: From<f32> + Zero, { fn from(p: lyon::math::Point) -> Self { Vector2::from(p).into() } } impl<S> From<lyon::math::Point> for Vector4<S> where S: From<f32> + Zero, { fn from(p: lyon::math::Point) -> Self { Vector2::from(p).into() } } impl<S> From<lyon::math::F64Point> for Vector2<S> where S: From<f64>, { fn from(p: 
lyon::math::F64Point) -> Self { (S::from(p.x), S::from(p.y)).into() } } impl<S> From<lyon::math::F64Point> for Vector3<S> where S: From<f64> + Zero, { fn from(p: lyon::math::F64Point) -> Self { Vector2::from(p).into() } } impl<S> From<lyon::math::F64Point> for Vector4<S> where S: From<f64> + Zero, { fn from(p: lyon::math::F64Point) -> Self { Vector2::from(p).into() } } impl<S> From<lyon::math::Vector> for Vector2<S> where S: From<f32>, { fn from(v: lyon::math::Vector) -> Self { (S::from(v.x), S::from(v.y)).into() } } impl<S> From<lyon::math::Vector> for Vector3<S> where S: From<f32> + Zero, { fn from(v: lyon::math::Vector) -> Self { Vector2::from(v).into() } } impl<S> From<lyon::math::Vector> for Vector4<S> where S: From<f32> + Zero, { fn from(v: lyon::math::Vector) -> Self { Vector2::from(v).into() } } impl<S> From<lyon::math::Size> for Vector2<S> where S: From<f32>, { fn from(p: lyon::math::Size) -> Self { (S::from(p.width), S::from(p.height)).into() } } impl<S> From<lyon::math::Size> for Vector3<S> where S: From<f32> + Zero, { fn from(p: lyon::math::Size) -> Self { Vector2::from(p).into() } } impl<S> From<lyon::math::Size> for Vector4<S> where S: From<f32> + Zero, { fn from(p: lyon::math::Size) -> Self { Vector2::from(p).into() } } impl Into<lyon::math::Point> for Vector2 { fn into(self) -> lyon::math::Point { (self.x, self.y).into() } } impl Into<lyon::math::Point> for Vector3 { fn into(self) -> lyon::math::Point { (self.x, self.y).into() } } impl Into<lyon::math::Point> for Vector4 { fn into(self) -> lyon::math::Point { (self.x, self.y).into() } } impl<S> Into<lyon::math::F64Point> for Vector2<S> where S: Into<f64>, { fn into(self) -> lyon::math::F64Point { (self.x.into(), self.y.into()).into() } } impl<S> Into<lyon::math::F64Point> for Vector3<S> where S: Into<f64>, { fn into(self) -> lyon::math::F64Point { (self.x.into(), self.y.into()).into() } } impl<S> Into<lyon::math::F64Point> for Vector4<S> where S: Into<f64>, { fn into(self) -> lyon::math::F64Point { 
(self.x.into(), self.y.into()).into() } } impl Into<lyon::math::Vector> for Vector2 { fn into(self) -> lyon::math::Vector { (self.x, self.y).into() } } impl Into<lyon::math::Vector> for Vector3 { fn into(self) -> lyon::math::Vector { (self.x, self.y).into() } } impl Into<lyon::math::Vector> for Vector4 { fn into(self) -> lyon::math::Vector { (self.x, self.y).into() } } impl Into<lyon::math::Size> for Vector2 { fn into(self) -> lyon::math::Size { (self.x, self.y).into() } } impl Into<lyon::math::Size> for Vector3 { fn into(self) -> lyon::math::Size { (self.x, self.y).into() } } impl Into<lyon::math::Size> for Vector4 { fn into(self) -> lyon::math::Size { (self.x, self.y).into() } } } impl_vector!(Vector2 { x, y }, 2, vec2); impl_vector!(Vector3 { x, y, z }, 3, vec3); impl_vector!(Vector4 { x, y, z, w }, 4, vec4); // tuple conversions impl<S> From<(S, S)> for Vector2<S> { fn from((x, y): (S, S)) -> Self { Vector2 { x, y } } } impl<S> From<(S, S, S)> for Vector3<S> { fn from((x, y, z): (S, S, S)) -> Self { Vector3 { x, y, z } } } impl<S> From<(S, S, S, S)> for Vector4<S> { fn from((x, y, z, w): (S, S, S, S)) -> Self { Vector4 { x, y, z, w } } } impl<S> Into<(S, S)> for Vector2<S> { fn into(self) -> (S, S) { let Vector2 { x, y } = self; (x, y) } } impl<S> Into<(S, S, S)> for Vector3<S> { fn into(self) -> (S, S, S) { let Vector3 { x, y, z } = self; (x, y, z) } } impl<S> Into<(S, S, S, S)> for Vector4<S> { fn into(self) -> (S, S, S, S) { let Vector4 { x, y, z, w } = self; (x, y, z, w) } } // expanding tuple conversions impl<S> From<(S, S)> for Vector3<S> where S: Zero, { fn from((x, y): (S, S)) -> Self { let z = S::zero(); Vector3 { x, y, z } } } impl<S> From<(S, S)> for Vector4<S> where S: Zero, { fn from((x, y): (S, S)) -> Self { let z = S::zero(); let w = S::zero(); Vector4 { x, y, z, w } } } impl<S> From<(S, S, S)> for Vector4<S> where S: Zero, { fn from((x, y, z): (S, S, S)) -> Self { let w = S::zero(); Vector4 { x, y, z, w } } } // expanding fixed-size array 
conversions impl<S> From<[S; 2]> for Vector3<S> where S: Zero, { fn from([x, y]: [S; 2]) -> Self { let z = S::zero(); Vector3 { x, y, z } } } impl<S> From<[S; 2]> for Vector4<S> where S: Zero, { fn from([x, y]: [S; 2]) -> Self { let z = S::zero(); let w = S::zero(); Vector4 { x, y, z, w } } } impl<S> From<[S; 3]> for Vector4<S> where S: Zero, { fn from([x, y, z]: [S; 3]) -> Self { let w = S::zero(); Vector4 { x, y, z, w } } } // expanding vector conversions impl<S> From<Vector2<S>> for Vector3<S> where S: Zero, { fn from(Vector2 { x, y }: Vector2<S>) -> Self { let z = S::zero(); Vector3 { x, y, z } } } impl<S> From<Vector2<S>> for Vector4<S> where S: Zero, { fn from(Vector2 { x, y }: Vector2<S>) -> Self { let z = S::zero(); let w = S::zero(); Vector4 { x, y, z, w } } } impl<S> From<Vector3<S>> for Vector4<S> where S: Zero, { fn from(Vector3 { x, y, z }: Vector3<S>) -> Self { let w = S::zero(); Vector4 { x, y, z, w } } } // Vector 2 impl<S> Vector2<S> { /// A unit vector in the `x` direction. #[inline] pub fn unit_x() -> Vector2<S> where S: Zero + One, { Vector2::new(S::one(), S::zero()) } /// A unit vector in the `y` direction. #[inline] pub fn unit_y() -> Vector2<S> where S: Zero + One, { Vector2::new(S::zero(), S::one()) } /// The perpendicular dot product of the vector and `other`. #[inline] pub fn perp_dot(self, other: Vector2<S>) -> S where S: ops::Sub<Output = S> + ops::Mul<Output = S>, { (self.x * other.y) - (self.y * other.x) } /// Create a `Vector3`, using the `x` and `y` values from this vector, and the /// provided `z`. #[inline] pub fn extend(self, z: S) -> Vector3<S> { Vector3::new(self.x, self.y, z) } /// Construct a normalised (aka "unit") vector from the given angle in radians. /// /// # Examples /// /// ``` /// # use nannou::prelude::*; /// # fn main() { /// assert_eq!(Vector2::from_angle(0.0), vec2(1.0, 0.0)); /// // Keep an eye out for accumulating floating point error. 
/// assert_eq!(Vector2::from_angle(PI * 0.5), vec2(-0.00000004371139, 1.0)); /// assert_eq!(Vector2::from_angle(PI), vec2(-1.0, -0.00000008742278)); /// assert_eq!(Vector2::from_angle(PI * 1.5), vec2(0.000000011924881, -1.0)); /// assert_eq!(Vector2::from_angle(TAU), vec2(1.0, 0.00000017484555)); /// # } /// ``` pub fn from_angle(radians: S) -> Self where S: BaseFloat, { vec2(radians.cos(), radians.sin()) } /// Returns the angle of the vector in radians. /// /// # Examples /// /// ``` /// # use nannou::prelude::*; /// # use nannou::Draw; /// # fn main() { /// let v = vec2(-0.5, 0.5); /// let radians = v.angle(); /// # let draw = Draw::new(); /// draw.quad() /// .rotate(radians); /// assert_eq!(radians, 2.356194490192345); /// # } /// ``` /// pub fn angle(self) -> S where S: BaseFloat, { self.y.atan2(self.x) } /// Returns the angle of the vector between `self` and `other` in radians. /// /// The result is between 0 and PI. Note: Nannou's implementation is commutative /// (`v1.angle_between(v2)` == `v2.angle_between(v1)`). /// /// # Example /// /// ``` /// # use nannou::prelude::*; /// # fn main() { /// let right = vec2(2.0, 0.0); /// let up = vec2(0.0, 3.0); /// let down = vec2(0.0, -100.0); /// assert_eq!(right.angle_between(up), PI/2.0); /// assert_eq!(right.angle_between(down), PI/2.0); /// # } /// ``` pub fn angle_between(self, other: Self) -> S where S: BaseFloat, { let cos_theta = self.dot(other) / (self.magnitude() * other.magnitude()); // Handle float rounding issues by clamping to [-1, 1]. let cos_theta = cos_theta.min(S::one()).max(-S::one()); cos_theta.acos() } /// Rotate the vector around the origin (0.0, 0.0) by the given radians. 
/// /// # Examples /// /// ``` /// # use nannou::prelude::*; /// # fn main() { /// let v = vec2(100.0, 0.0); /// assert_eq!(v.rotate(PI).x, -v.x); /// assert_eq!(v.rotate(TAU).x, v.x); /// # } /// ``` pub fn rotate(self, radians: S) -> Self where S: BaseFloat, { let rad_cos = radians.cos(); let rad_sin = radians.sin(); let x = self.x * rad_cos - self.y * rad_sin; let y = self.x * rad_sin + self.y * rad_cos; vec2(x, y) } //impl_swizzle_functions!(Vector1, Vector2, Vector3, Vector4, S, xy); } // Vector 3 impl<S> Vector3<S> { /// A unit vector in the `x` direction. #[inline] pub fn unit_x() -> Vector3<S> where S: Zero + One, { Vector3::new(S::one(), S::zero(), S::zero()) } /// A unit vector in the `y` direction. #[inline]<|fim▁hole|> pub fn unit_y() -> Vector3<S> where S: Zero + One, { Vector3::new(S::zero(), S::one(), S::zero()) } /// A unit vector in the `z` direction. #[inline] pub fn unit_z() -> Vector3<S> where S: Zero + One, { Vector3::new(S::zero(), S::zero(), S::one()) } /// Returns the cross product of the vector and `other`. #[inline] pub fn cross(self, other: Vector3<S>) -> Vector3<S> where S: Copy + ops::Sub<Output = S> + ops::Mul<Output = S>, { Vector3::new( (self.y * other.z) - (self.z * other.y), (self.z * other.x) - (self.x * other.z), (self.x * other.y) - (self.y * other.x), ) } /// Create a `Vector4`, using the `x`, `y` and `z` values from this vector, and the /// provided `w`. #[inline] pub fn extend(self, w: S) -> Vector4<S> { Vector4::new(self.x, self.y, self.z, w) } /// Create a `Vector2`, dropping the `z` value. #[inline] pub fn truncate(self) -> Vector2<S> { Vector2::new(self.x, self.y) } // impl_swizzle_functions!(Vector1, Vector2, Vector3, Vector4, S, xyz); } // Vector 4 impl<S> Vector4<S> { /// A unit vector in the `x` direction. #[inline] pub fn unit_x() -> Vector4<S> where S: Zero + One, { Vector4::new(S::one(), S::zero(), S::zero(), S::zero()) } /// A unit vector in the `y` direction. 
#[inline] pub fn unit_y() -> Vector4<S> where S: Zero + One, { Vector4::new(S::zero(), S::one(), S::zero(), S::zero()) } /// A unit vector in the `z` direction. #[inline] pub fn unit_z() -> Vector4<S> where S: Zero + One, { Vector4::new(S::zero(), S::zero(), S::one(), S::zero()) } /// A unit vector in the `w` direction. #[inline] pub fn unit_w() -> Vector4<S> where S: Zero + One, { Vector4::new(S::zero(), S::zero(), S::zero(), S::one()) } /// Create a `Vector3`, dropping the `w` value. #[inline] pub fn truncate(self) -> Vector3<S> { Vector3::new(self.x, self.y, self.z) } /// Create a `Vector3`, dropping the nth element. #[inline] pub fn truncate_n(&self, n: isize) -> Vector3<S> where S: Copy, { match n { 0 => Vector3::new(self.y, self.z, self.w), 1 => Vector3::new(self.x, self.z, self.w), 2 => Vector3::new(self.x, self.y, self.w), 3 => Vector3::new(self.x, self.y, self.z), _ => panic!("{:?} is out of range", n), } } //impl_swizzle_functions!(Vector1, Vector2, Vector3, Vector4, S, xyzw); } // utility functions fn limit_magnitude<V>(v: V, limit: V::Scalar) -> V where V: InnerSpace, V::Scalar: BaseFloat, { let magnitude2 = v.magnitude2(); if magnitude2 <= limit * limit { v } else { v.normalize() * limit } } #[cfg(test)] mod tests { use super::*; use crate::prelude::*; use cgmath::assert_ulps_eq; #[test] fn test_angle_between() { let right = vec2(1.0, 0.0); let upright = vec2(1.0, 1.0); assert_eq!(right.angle_between(upright), PI / 4.0); // angle_between is symmetric. assert_eq!(upright.angle_between(right), PI / 4.0); let left = vec2(-1.0, 0.0); let left2 = vec2(-1.0123456789, 0.0); assert_ulps_eq!(right.angle_between(left), PI); assert_ulps_eq!(right.angle_between(left2), PI); // angle between same vector is 0. assert_eq!(upright.angle_between(upright), 0.0); assert_eq!(left.angle_between(left), 0.0); assert_eq!(left2.angle_between(left2), 0.0); // handles zero vector. 
assert_eq!(right.angle_between(vec2(0.0, 0.0)), 0.0); assert_eq!(left2.angle_between(vec2(0.0, 0.0)), 0.0); } }<|fim▁end|>
<|file_name|>opener.js<|end_file_name|><|fim▁begin|>/*-------------------------------------------------------- * Copyright (c) 2011, The Dojo Foundation * This software is distributed under the "Simplified BSD license", * the text of which is available at http://www.winktoolkit.org/licence.txt * or see the "license.txt" file for more details. *--------------------------------------------------------*/ /** * @fileOverview Implements an image opener. Creates an "image opener" with a 3D rendering * * @author Jerome GIRAUD */ /** * The event is fired when someone clicks on the image * * @name wink.ui.xyz.Opener#/opener/events/click * @event * @param {object} param The parameters object * @param {integer} param.openerId uId of the opener */ define(['../../../../_amd/core', '../../../../math/_geometric/js/geometric', '../../../../fx/_xyz/js/3dfx'], function(wink) { /** * @class Implements an image opener. Creates an "image opener" with a 3D rendering. * Define the image you want to see as the opener's background. Use the "getDomNode" method to insert the opener into the page. * * @param {object} properties The properties object * @param {string} properties.image The URL of the image to display * @param {integer} properties.height The height of the opener (should be the same as the image height) * @param {integer} properties.width The width of the opener (should be the same as the image width) * @param {integer} [properties.panelHeight=20] The height of each panel. 
The image is divided into X panels * @param {integer} [properties.panelsAngle=140] The winding angle of the opener * @param {integer} [properties.openerXAngle=10] The angle between the opener and the page on the X-axis * @param {integer} [properties.openerYAngle=15] The angle between the opener and the page on the Y-axis * @param {integer} [properties.duration=500] The opening duration in milliseconds * * @requires wink.math._geometric * @requires wink.math._matrix * @requires wink.fx._xyz * * @example * * var properties = * { * 'image': './img/wink.png', * 'height': 185, * 'width': 185, * 'panelsAngle': 200, * 'panelHeight': 5, * 'openerXAngle': 5, * 'openerYAngle': -50, * 'duration': 300 * } * * opener = new wink.ui.xyz.Opener(properties); * wink.byId('content').appendChild(opener.getDomNode()); * * @compatibility iOS2, iOS3, iOS4, iOS5, iOS6, Android 3.0, Android 3.1, Android 4.0, Android 4.1.2, BlackBerry 7, BB10 * * @see <a href="WINK_ROOT_URL/ui/xyz/opener/test/test_opener_1.html" target="_blank">Test page</a> * @see <a href="WINK_ROOT_URL/ui/xyz/opener/test/test_opener_2.html" target="_blank">Test page</a> */ wink.ui.xyz.Opener = function(properties) { /** * Unique identifier * * @property uId * @type integer */ this.uId = wink.getUId(); /** * True if the image is "opened", false otherwise * * @property opened * @type boolean */ this.opened = false; /** * The URL of the opener image * * @property image * @type string */ this.image = null; /** * The height of the opener * * @property height * @type integer */ this.height = 0; /** * The width of the opener * * @property width * @type integer */ this.width = 0; /** * The height of each panel * * @property panelHeight * @type integer */ this.panelHeight = 20; /** * The winding angle of the opener * * @property panelsAngle * @type integer */ this.panelsAngle = 140; /** * The angle between the opener and the page on the X-axis * * @property openerXAngle * @type integer */ this.openerXAngle = 10; /** * The angle 
between the opener and the page on the Y-axis * * @property the angle between the opener and the page on the Y-axis * @type integer */ this.openerYAngle = 15; /** * the opening duration in milliseconds * * @property duration * @type integer */ this.duration = 500; this._nbPanels = 0; this._panelAngle = 0; this._panelsList = []; this._domNode = null; this._panelsNode = null; this._contentNode = null; wink.mixin(this, properties); if ( this._validateProperties() === false )return; this._initProperties(); this._initDom(); this._initListeners(); }; wink.ui.xyz.Opener.prototype = { /** * @returns {HTMLElement} The dom node containing the Opener */ getDomNode: function() { return this._domNode; }, /** * Opens the image */ open: function() { wink.fx.initComposedTransform(this._panelsNode, false); wink.fx.setTransformPart(this._panelsNode, 1, { type: 'rotate', x: 0, y: 1, z: 0, angle: this.openerYAngle }); wink.fx.setTransformPart(this._panelsNode, 2, { type: 'rotate', x: 1, y: 0, z: 0, angle: this.openerXAngle }); wink.fx.applyComposedTransform(this._panelsNode); this._domNode.style['height'] = '0px'; var l = this._panelsList.length; for ( var i = l-1; i > 0; i-- ) { this._panelsList[i].open(); } this.opened = true; }, /** * Closes the image */ close: function() { wink.fx.setTransformPart(this._panelsNode, 1, { type: 'rotate', x: 0, y: 1, z: 0, angle: 0 }); wink.fx.setTransformPart(this._panelsNode, 2, { type: 'rotate', x: 1, y: 0, z: 0, angle: 0 }); wink.fx.applyComposedTransform(this._panelsNode); this._domNode.style['height'] = 'auto'; var l = this._panelsList.length; for ( var i = l-1; i > 0; i-- ) { this._panelsList[i].close(); } this.opened = false; }, /** * Toggles the image display */ toggle: function() { if ( this.opened ) { this.close(); } else { this.open(); } }, /** * Handles the click events */ _handleClick: function() { this.toggle(); wink.publish('/opener/events/click', {'openerId': this.uId}); }, /** * Validate the properties of the component * @returns 
{boolean} True if the properties are valid, false otherwise */ _validateProperties: function() { // Check duration if ( !wink.isInteger(this.duration) ) { wink.log('[Opener] The property duration must be an integer'); return false; } // Check opener X angle if ( !wink.isInteger(this.openerXAngle) ) { wink.log('[Opener] The property openerXAngle must be an integer'); return false; } // Check opener Y angle if ( !wink.isInteger(this.openerYAngle) ) { wink.log('[Opener] The property openerYAngle must be an integer'); return false; } // Check panel angle if ( !wink.isInteger(this.panelsAngle) ) { wink.log('[Opener] The property panelsAngle must be an integer'); return false; } // Check panelHeight if ( !wink.isInteger(this.panelHeight) ) { wink.log('[Opener] The property panelHeight must be an integer'); return false; } // Check height if ( !wink.isInteger(this.height) ) { wink.log('[Opener] The property height must be an integer'); return false; } // Check width if ( !wink.isInteger(this.width) ) { wink.log('[Opener] The property width must be an integer'); return false; } // Check image if ( !wink.isSet(this.image) ) { wink.log('[Opener] The property image must be set'); return false; } return true; }, /** * Initialize the 'click' listener */ _initListeners: function() { wink.subscribe('/opener_panel/events/click', {context: this, method: '_handleClick'}); }, /** * Initialize the Opener properties */ _initProperties: function() { this._nbPanels = Math.ceil(this.height / this.panelHeight); this._panelAngle = this.panelsAngle / this._nbPanels; }, /** * Initialize the Opener DOM nodes */ _initDom: function() { this._domNode = document.createElement('div'); this._domNode.className = 'op_container'; wink.fx.apply(this._domNode, { height: this.height + 'px', width: this.width + 'px' }); this._panelsNode = document.createElement('div'); this._panelsNode.className = 'op_panels'; wink.fx.apply(this._panelsNode, {'transform-origin': '100% 0', 'transform-style': 
'preserve-3d'}); for ( var i=0; i<this._nbPanels; i++ ) { var panel = new wink.ui.xyz.Opener.Panel({index: i, image: this.image, height: this.panelHeight, angle: this._panelAngle}); this._panelsList.push(panel); this._panelsNode.appendChild(panel.getDomNode()); wink.fx.applyTransformTransition(panel.getDomNode(), this.duration + 'ms', '0ms', 'linear'); } this._domNode.appendChild(this._panelsNode); wink.fx.applyTransformTransition(this._panelsNode, this.duration + 'ms', '0ms', 'linear'); } }; /** * @class Implements an image opener panel. Should only be instantiated by the Opener itself * * @param {object} properties The properties object * @param {integer} properties.index The position of the panel in the panels list * @param {string} properties.image The URL of the image to display * @param {integer} properties.height The height of the panel * @param {integer} properties.angle The opening angle of the panel * */ wink.ui.xyz.Opener.Panel = function(properties) { /** * Unique identifier * * @property uId * @type integer */ this.uId = wink.getUId(); /** * The position of the panel * * @property index * @type integer */ this.index = null; /** * The URL of the image to display * * @property image * @type string */ this.image = null; /** * The height of the panel * * @property height * @type integer */ this.height = 0; /** * The opening angle of the panel * * @property angle * @type integer */ this.angle = 0; this._y = 0; this._z = 0; this._domNode = null; wink.mixin(this, properties); this._initProperties(); this._initDom(); }; /** * The event is fired when someone clicks on the panel * * @name wink.ui.xyz.Opener#/opener_panel/events/click * @event * @param {object} param The parameters object <|fim▁hole|> { /** * @returns {HTMLElement} The component main dom node */ getDomNode: function() { return this._domNode; }, /** * Opens the image */ open: function() { wink.fx.initComposedTransform(this._domNode, false); wink.fx.setTransformPart(this._domNode, 1, { type: 
'rotate', x: 1, y: 0, z: 0, angle: (this.angle*(this.index)) }); wink.fx.setTransformPart(this._domNode, 2, { type: 'translate', x: 0, y: this._y, z: this._z }); wink.fx.applyComposedTransform(this._domNode); }, /** * Closes the image */ close: function() { wink.fx.setTransformPart(this._domNode, 1, { type: 'rotate', x: 1, y: 0, z: 0, angle: 0 }); wink.fx.setTransformPart(this._domNode, 2, { type: 'translate', x: 0, y: (this.index * this.height), z: 0 }); wink.fx.applyComposedTransform(this._domNode); }, /** * Initialize the Panel properties */ _initProperties: function() { for ( var i=0; i<this.index; i++ ) { this._y += Math.cos(wink.math.degToRad(this.angle*i))*this.height; this._z += Math.sin(wink.math.degToRad(this.angle*i))*this.height; } }, /** * Initialize the Panel DOM node */ _initDom: function() { this._domNode = document.createElement('div'); this._domNode.className = 'op_panel'; wink.fx.apply(this._domNode, { height: (this.height + 2) + 'px', 'transform-origin': '0 0', backgroundImage: 'url(' + this.image + ')', backgroundRepeat: 'no-repeat', backgroundPositionX: '0', backgroundPositionY: -this.index*this.height + 'px' }); this._domNode.onclick = function() { wink.publish('/opener_panel/events/click', {'panelId': this.uId}); }; wink.fx.translate(this._domNode, 0, this.index*this.height); } }; return wink.ui.xyz.Opener; });<|fim▁end|>
* @param {integer} param.panelId uId of the panel */ wink.ui.xyz.Opener.Panel.prototype =
<|file_name|>manual_non_exhaustive.rs<|end_file_name|><|fim▁begin|>#![warn(clippy::manual_non_exhaustive)] #![allow(unused)] mod enums { enum E { A, B, #[doc(hidden)] _C, } // user forgot to remove the marker #[non_exhaustive] enum Ep { A, B, #[doc(hidden)] _C, } // marker variant does not have doc hidden attribute, should be ignored enum NoDocHidden { A, B, _C, } // name of variant with doc hidden does not start with underscore, should be ignored enum NoUnderscore { A, B, #[doc(hidden)] C, } // variant with doc hidden is not unit, should be ignored enum NotUnit { A, B, #[doc(hidden)] _C(bool), } // variant with doc hidden is the only one, should be ignored enum OnlyMarker { #[doc(hidden)] _A, } // variant with multiple markers, should be ignored enum MultipleMarkers { A, #[doc(hidden)] _B, #[doc(hidden)] _C, } // already non_exhaustive and no markers, should be ignored #[non_exhaustive] enum NonExhaustive { A, B, } } mod structs { struct S { pub a: i32, pub b: i32, _c: (), } // user forgot to remove the private field #[non_exhaustive] struct Sp { pub a: i32, pub b: i32, _c: (), } // some other fields are private, should be ignored struct PrivateFields { a: i32, pub b: i32, _c: (), } // private field name does not start with underscore, should be ignored struct NoUnderscore { pub a: i32, pub b: i32, c: (), } // private field is not unit type, should be ignored struct NotUnit { pub a: i32, pub b: i32, _c: i32, } // private field is the only field, should be ignored struct OnlyMarker { _a: (), } // already non exhaustive and no private fields, should be ignored #[non_exhaustive] struct NonExhaustive { pub a: i32, pub b: i32, } } mod tuple_structs { struct T(pub i32, pub i32, ()); // user forgot to remove the private field #[non_exhaustive] struct Tp(pub i32, pub i32, ()); // some other fields are private, should be ignored struct PrivateFields(pub i32, i32, ()); // private field is not unit type, should be ignored struct NotUnit(pub i32, pub i32, i32); <|fim▁hole|> 
// already non exhaustive and no private fields, should be ignored #[non_exhaustive] struct NonExhaustive(pub i32, pub i32); } fn main() {}<|fim▁end|>
// private field is the only field, should be ignored struct OnlyMarker(());
<|file_name|>credit_seat_spec.js<|end_file_name|><|fim▁begin|>define([ 'collections/credit_provider_collection', 'ecommerce', 'models/course_seats/credit_seat' ], function (CreditProviderCollection, ecommerce, CreditSeat) { 'use strict'; var model, data = { id: 9, url: 'http://ecommerce.local:8002/api/v2/products/9/', structure: 'child', product_class: 'Seat', title: 'Seat in edX Demonstration Course with honor certificate', price: '0.00', expires: null, attribute_values: [ { name: 'certificate_type', value: 'credit' }, { name: 'course_key', value: 'edX/DemoX/Demo_Course' }, { name: 'id_verification_required', value: false } ], is_available_to_buy: true }; beforeEach(function () { model = CreditSeat.findOrCreate(data, {parse: true}); ecommerce.credit.providers = new CreditProviderCollection([{id: 'harvard', display_name: 'Harvard'}]); }); describe('Credit course seat model', function () { describe('credit provider validation', function () { function assertCreditProviderInvalid(credit_provider, expected_msg) { model.set('credit_provider', credit_provider); expect(model.validate().credit_provider).toEqual(expected_msg); expect(model.isValid(true)).toBeFalsy(); } it('should do nothing if the credit provider is valid', function () { model.set('credit_provider', ecommerce.credit.providers.at(0).get('id')); expect(model.validate().credit_provider).toBeUndefined(); }); <|fim▁hole|> values.forEach(function (value) { assertCreditProviderInvalid(value, msg); }); }); it('should return a message if the credit provider is not a valid credit provider', function () { var msg = 'Please select a valid credit provider.'; assertCreditProviderInvalid('acme', msg); }); }); }); } );<|fim▁end|>
it('should return a message if the credit provider is not set', function () { var msg = 'All credit seats must have a credit provider.', values = [null, undefined, ''];
<|file_name|>RedisCluster.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2011 Jonathan Leibiusky * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the * Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package com.navercorp.redis.cluster; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.navercorp.redis.cluster.pipeline.BuilderFactory; import redis.clients.jedis.BinaryClient.LIST_POSITION; import redis.clients.jedis.Tuple; /** * The Class RedisCluster. * * @author jaehong.kim */ public class RedisCluster extends BinaryRedisCluster implements RedisClusterCommands { /** * Instantiates a new redis cluster. * * @param host the host */ public RedisCluster(final String host) { super(host); } /** * Instantiates a new redis cluster. * * @param host the host * @param port the port */ public RedisCluster(final String host, final int port) { super(host, port); } /** * Instantiates a new redis cluster. 
* * @param host the host * @param port the port * @param timeout the timeout */ public RedisCluster(final String host, final int port, final int timeout) { super(host, port, timeout); } // //////////////////////////////////////////////////////////////////////////////////////////////////////// // Keys public Long del(final String... keys) { client.del(keys); return client.getIntegerReply(); } public Boolean exists(final String key) { client.exists(key); return client.getIntegerReply() == 1; } public Long expire(final String key, final int seconds) { client.expire(key, seconds); return client.getIntegerReply(); } public Long expireAt(final String key, final long secondsTimestamp) { client.expireAt(key, secondsTimestamp); return client.getIntegerReply(); } public Long pexpire(final String key, final long milliseconds) { client.pexpire(key, milliseconds); return client.getIntegerReply(); } public Long pexpireAt(final String key, final long millisecondsTimestamp) { client.pexpireAt(key, millisecondsTimestamp); return client.getIntegerReply(); } public Long objectRefcount(String string) { client.objectRefcount(string); return client.getIntegerReply(); } public String objectEncoding(String string) { client.objectEncoding(string); return client.getBulkReply(); } public Long objectIdletime(String string) { client.objectIdletime(string); return client.getIntegerReply(); } public Long ttl(final String key) { client.ttl(key); return client.getIntegerReply(); } public Long pttl(final String key) { client.pttl(key); return client.getIntegerReply(); } public String type(final String key) { client.type(key); return client.getStatusCodeReply(); } public Long persist(final String key) { client.persist(key); return client.getIntegerReply(); } // //////////////////////////////////////////////////////////////////////////////////////////////////////// // Strings public Long append(final String key, final String value) { client.append(key, value); return client.getIntegerReply(); } 
public Long decr(final String key) { client.decr(key); return client.getIntegerReply(); } public Long decrBy(final String key, final long integer) { client.decrBy(key, integer); return client.getIntegerReply(); } public String get(final String key) { client.get(key); return client.getBulkReply(); } public Boolean getbit(String key, long offset) { client.getbit(key, offset); return client.getIntegerReply() == 1; } public String getrange(String key, long startOffset, long endOffset) { client.getrange(key, startOffset, endOffset); return client.getBulkReply(); } public String substr(final String key, final int start, final int end) { client.substr(key, start, end); return client.getBulkReply(); } public String getSet(final String key, final String value) { client.getSet(key, value); return client.getBulkReply(); } public Long incr(final String key) { client.incr(key); return client.getIntegerReply(); } public Long incrBy(final String key, final long integer) { client.incrBy(key, integer); return client.getIntegerReply(); } public Double incrByFloat(final String key, final double increment) { client.incrByFloat(key, increment); String reply = client.getBulkReply(); return (reply != null ? 
new Double(reply) : null); } public String set(final String key, String value) { client.set(key, value); return client.getStatusCodeReply(); } public Boolean setbit(String key, long offset, boolean value) { client.setbit(key, offset, value); return client.getIntegerReply() == 1; } public String setex(final String key, final int seconds, final String value) { client.setex(key, seconds, value); return client.getStatusCodeReply(); } public Long setnx(final String key, final String value) { client.setnx(key, value); return client.getIntegerReply(); } public Long setrange(String key, long offset, String value) { client.setrange(key, offset, value); return client.getIntegerReply(); } public Long strlen(final String key) { client.strlen(key); return client.getIntegerReply(); } public List<String> mget(final String... keys) { client.mget(keys); return client.getMultiBulkReply(); } public String psetex(final String key, final long milliseconds, final String value) { client.psetex(key, milliseconds, value); return client.getStatusCodeReply(); } public String mset(String... keysvalues) { client.mset(keysvalues); return client.getStatusCodeReply(); } public Long bitcount(final String key) { client.bitcount(key); return client.getIntegerReply(); } public Long bitcount(final String key, long start, long end) { client.bitcount(key, start, end); return client.getIntegerReply(); } // //////////////////////////////////////////////////////////////////////////////////////////////////////// // Hashes public Long hdel(final String key, final String... 
fields) { client.hdel(key, fields); return client.getIntegerReply(); } public Boolean hexists(final String key, final String field) { client.hexists(key, field); return client.getIntegerReply() == 1; } public String hget(final String key, final String field) { client.hget(key, field); return client.getBulkReply(); } public Map<String, String> hgetAll(final String key) { client.hgetAll(key); return BuilderFactory.STRING_MAP .build(client.getBinaryMultiBulkReply()); } public Long hincrBy(final String key, final String field, final long value) { client.hincrBy(key, field, value); return client.getIntegerReply(); } public Double hincrByFloat(final String key, final String field, double increment) { client.hincrByFloat(key, field, increment); String reply = client.getBulkReply(); return (reply != null ? new Double(reply) : null); } public Set<String> hkeys(final String key) { client.hkeys(key); return BuilderFactory.STRING_SET .build(client.getBinaryMultiBulkReply()); } public Long hlen(final String key) { client.hlen(key); return client.getIntegerReply(); } public List<String> hmget(final String key, final String... 
fields) { client.hmget(key, fields); return client.getMultiBulkReply(); } public String hmset(final String key, final Map<String, String> hash) { client.hmset(key, hash); return client.getStatusCodeReply(); } public Long hset(final String key, final String field, final String value) { client.hset(key, field, value); return client.getIntegerReply(); } public Long hsetnx(final String key, final String field, final String value) { client.hsetnx(key, field, value); return client.getIntegerReply(); } public List<String> hvals(final String key) { client.hvals(key); final List<String> lresult = client.getMultiBulkReply(); return lresult; } // //////////////////////////////////////////////////////////////////////////////////////////////////////// // Lists public String lindex(final String key, final long index) { client.lindex(key, index); return client.getBulkReply(); } public Long linsert(final String key, final LIST_POSITION where, final String pivot, final String value) { client.linsert(key, where, pivot, value); return client.getIntegerReply(); } public Long llen(final String key) { client.llen(key); return client.getIntegerReply(); } public String lpop(final String key) { client.lpop(key); return client.getBulkReply(); } public Long lpush(final String key, final String... 
strings) { client.lpush(key, strings); return client.getIntegerReply(); } public Long lpushx(final String key, final String string) { client.lpushx(key, string); return client.getIntegerReply(); } public List<String> lrange(final String key, final long start, final long end) { client.lrange(key, start, end); return client.getMultiBulkReply(); } public Long lrem(final String key, final long count, final String value) { client.lrem(key, count, value); return client.getIntegerReply(); } public String lset(final String key, final long index, final String value) { client.lset(key, index, value); return client.getStatusCodeReply(); } public String ltrim(final String key, final long start, final long end) { client.ltrim(key, start, end); return client.getStatusCodeReply(); } public String rpop(final String key) { client.rpop(key); return client.getBulkReply(); } public Long rpush(final String key, final String... strings) { client.rpush(key, strings); return client.getIntegerReply(); } public Long rpushx(final String key, final String string) { client.rpushx(key, string); return client.getIntegerReply(); } // //////////////////////////////////////////////////////////////////////////////////////////////////////// // Sets public Long sadd(final String key, final String... 
members) { client.sadd(key, members); return client.getIntegerReply(); } public Long scard(final String key) { client.scard(key); return client.getIntegerReply(); } public Boolean sismember(final String key, final String member) { client.sismember(key, member); return client.getIntegerReply() == 1; } public Set<String> smembers(final String key) { client.smembers(key); final List<String> members = client.getMultiBulkReply(); return new HashSet<String>(members); } public String srandmember(final String key) { client.srandmember(key); return client.getBulkReply(); } public List<String> srandmember(final String key, final int count) { client.srandmember(key, count); return client.getMultiBulkReply(); } public Long srem(final String key, final String... members) { client.srem(key, members); return client.getIntegerReply(); } // //////////////////////////////////////////////////////////////////////////////////////////////////////// // Sorted Sets public Long zadd(final String key, final double score, final String member) { client.zadd(key, score, member); return client.getIntegerReply(); } public Long zadd(final String key, final Map<Double, String> scoreMembers) { client.zadd(key, scoreMembers); return client.getIntegerReply(); } public Long zadd2(final String key, final Map<String, Double> scoreMembers) { client.zadd2(key, scoreMembers); return client.getIntegerReply(); } public Long zcard(final String key) { client.zcard(key); return client.getIntegerReply(); } public Long zcount(final String key, final double min, final double max) { client.zcount(key, min, max); return client.getIntegerReply(); } public Long zcount(final String key, final String min, final String max) { client.zcount(key, min, max); return client.getIntegerReply(); } public Double zincrby(final String key, final double score, final String member) { client.zincrby(key, score, member); String newscore = client.getBulkReply(); return Double.valueOf(newscore); } public Set<String> zrange(final String 
key, final long start, final long end) { client.zrange(key, start, end); final List<String> members = client.getMultiBulkReply(); return new LinkedHashSet<String>(members); } public Set<String> zrangeByScore(final String key, final double min, final double max) { client.zrangeByScore(key, min, max); return new LinkedHashSet<String>(client.getMultiBulkReply()); } <|fim▁hole|> client.zrangeByScore(key, min, max); return new LinkedHashSet<String>(client.getMultiBulkReply()); } public Set<String> zrangeByScore(final String key, final double min, final double max, final int offset, final int count) { client.zrangeByScore(key, min, max, offset, count); return new LinkedHashSet<String>(client.getMultiBulkReply()); } public Set<String> zrangeByScore(final String key, final String min, final String max, final int offset, final int count) { client.zrangeByScore(key, min, max, offset, count); return new LinkedHashSet<String>(client.getMultiBulkReply()); } public Set<Tuple> zrangeWithScores(final String key, final long start, final long end) { client.zrangeWithScores(key, start, end); Set<Tuple> set = getTupledSet(); return set; } public Set<Tuple> zrangeByScoreWithScores(final String key, final double min, final double max) { client.zrangeByScoreWithScores(key, min, max); Set<Tuple> set = getTupledSet(); return set; } public Set<Tuple> zrangeByScoreWithScores(final String key, final String min, final String max) { client.zrangeByScoreWithScores(key, min, max); Set<Tuple> set = getTupledSet(); return set; } public Set<Tuple> zrangeByScoreWithScores(final String key, final double min, final double max, final int offset, final int count) { client.zrangeByScoreWithScores(key, min, max, offset, count); Set<Tuple> set = getTupledSet(); return set; } public Set<Tuple> zrangeByScoreWithScores(final String key, final String min, final String max, final int offset, final int count) { client.zrangeByScoreWithScores(key, min, max, offset, count); Set<Tuple> set = getTupledSet(); return 
set; } public Long zrank(final String key, final String member) { client.zrank(key, member); return client.getIntegerReply(); } public Long zrem(final String key, final String... members) { client.zrem(key, members); return client.getIntegerReply(); } public Long zremrangeByRank(final String key, final long start, final long end) { client.zremrangeByRank(key, start, end); return client.getIntegerReply(); } public Long zremrangeByScore(final String key, final double start, final double end) { client.zremrangeByScore(key, start, end); return client.getIntegerReply(); } public Long zremrangeByScore(final String key, final String start, final String end) { client.zremrangeByScore(key, start, end); return client.getIntegerReply(); } public Set<String> zrevrange(final String key, final long start, final long end) { client.zrevrange(key, start, end); final List<String> members = client.getMultiBulkReply(); return new LinkedHashSet<String>(members); } public Set<Tuple> zrevrangeWithScores(final String key, final long start, final long end) { client.zrevrangeWithScores(key, start, end); Set<Tuple> set = getTupledSet(); return set; } public Set<String> zrevrangeByScore(final String key, final double max, final double min) { client.zrevrangeByScore(key, max, min); return new LinkedHashSet<String>(client.getMultiBulkReply()); } public Set<String> zrevrangeByScore(final String key, final String max, final String min) { client.zrevrangeByScore(key, max, min); return new LinkedHashSet<String>(client.getMultiBulkReply()); } public Set<String> zrevrangeByScore(final String key, final double max, final double min, final int offset, final int count) { client.zrevrangeByScore(key, max, min, offset, count); return new LinkedHashSet<String>(client.getMultiBulkReply()); } public Set<Tuple> zrevrangeByScoreWithScores(final String key, final double max, final double min) { client.zrevrangeByScoreWithScores(key, max, min); Set<Tuple> set = getTupledSet(); return set; } public Set<Tuple> 
zrevrangeByScoreWithScores(final String key, final double max, final double min, final int offset, final int count) { client.zrevrangeByScoreWithScores(key, max, min, offset, count); Set<Tuple> set = getTupledSet(); return set; } public Set<Tuple> zrevrangeByScoreWithScores(final String key, final String max, final String min, final int offset, final int count) { client.zrevrangeByScoreWithScores(key, max, min, offset, count); Set<Tuple> set = getTupledSet(); return set; } public Set<String> zrevrangeByScore(final String key, final String max, final String min, final int offset, final int count) { client.zrevrangeByScore(key, max, min, offset, count); return new LinkedHashSet<String>(client.getMultiBulkReply()); } public Set<Tuple> zrevrangeByScoreWithScores(final String key, final String max, final String min) { client.zrevrangeByScoreWithScores(key, max, min); Set<Tuple> set = getTupledSet(); return set; } public Long zrevrank(final String key, final String member) { client.zrevrank(key, member); return client.getIntegerReply(); } public Double zscore(final String key, final String member) { client.zscore(key, member); final String score = client.getBulkReply(); return (score != null ? 
new Double(score) : null); } // //////////////////////////////////////////////////////////////////////////////////////////////////////// // Connection public String ping() { client.ping(); return client.getStatusCodeReply(); } // //////////////////////////////////////////////////////////////////////////////////////////////////////// // Server public String info() { client.info(); return client.getBulkReply(); } public String info(final String section) { client.info(section); return client.getBulkReply(); } public Long dbSize() { client.dbSize(); return client.getIntegerReply(); } public byte[] dump(final String key) { client.dump(key); return client.getBinaryBulkReply(); } public String restore(final String key, final long ttl, final byte[] serializedValue) { client.restore(key, ttl, serializedValue); return client.getStatusCodeReply(); } /** * Quit. * * @return the string */ public String quit() { client.quit(); return client.getStatusCodeReply(); } /** * Connect. */ public void connect() { client.connect(); } /** * Disconnect. */ public void disconnect() { client.disconnect(); } /** * Checks if is connected. * * @return true, if is connected */ public boolean isConnected() { return client.isConnected(); } /** * Gets the tupled set. * * @return the tupled set */ private Set<Tuple> getTupledSet() { List<String> membersWithScores = client.getMultiBulkReply(); Set<Tuple> set = new LinkedHashSet<Tuple>(); Iterator<String> iterator = membersWithScores.iterator(); while (iterator.hasNext()) { set.add(new Tuple(iterator.next(), Double.valueOf(iterator.next()))); } return set; } }<|fim▁end|>
public Set<String> zrangeByScore(final String key, final String min, final String max) {
<|file_name|>InvalidJsonFLException.java<|end_file_name|><|fim▁begin|>/** * This file is part of JsonFL. * * JsonFL is free software: you can redistribute it and/or modify it under the * terms of the Lesser GNU General Public License as published by the Free * Software Foundation, either version 3 of the License, or (at your option) any * later version. * * JsonFL is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR * A PARTICULAR PURPOSE. See the Lesser GNU General Public License for more * details. * * You should have received a copy of the Lesser GNU General Public License<|fim▁hole|> * along with JsonFL. If not, see <http://www.gnu.org/licenses/>. */ package au.com.houliston.jsonfl; /** * Is thrown when the creation string for the JsonFL is invalid * * @author Trent Houliston * @version 1.0 */ public class InvalidJsonFLException extends Exception { /** * Creates a new InvalidJsonFLException with the passed message * * @param message The message to set */ public InvalidJsonFLException(String message) { super(message); } /** * Creates a new InvalidJsonFLException along with a message and a cause * * @param message The message to set * @param cause The root cause which made this exception */ public InvalidJsonFLException(String message, Throwable cause) { super(message, cause); } }<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|>""" """ from __future__ import unicode_literals, division, print_function, absolute_import import argparse import codecs import sys from sqlalchemy.engine import create_engine from sqlalchemy.schema import MetaData from sqlacodegen.codegen import CodeGenerator import sqlacodegen def main(): parser = argparse.ArgumentParser(description='Generates SQLAlchemy model code from an existing database.') parser.add_argument('url', nargs='?', help='SQLAlchemy url to the database') parser.add_argument('--version', action='store_true', help="print the version number and exit") parser.add_argument('--schema', help='load tables from an alternate schema') parser.add_argument('--tables', help='tables to process (comma-separated, default: all)') parser.add_argument('--noviews', action='store_true', help="ignore views") parser.add_argument('--noindexes', action='store_true', help='ignore indexes') parser.add_argument('--noconstraints', action='store_true', help='ignore constraints') parser.add_argument('--nojoined', action='store_true', help="don't autodetect joined table inheritance") parser.add_argument('--noinflect', action='store_true', help="don't try to convert tables names to singular form") parser.add_argument('--noclasses', action='store_true', help="don't generate classes, only tables") parser.add_argument('--alwaysclasses', action='store_true', help="always generate classes") parser.add_argument('--nosequences', action='store_true', help="don't auto-generate postgresql sequences") parser.add_argument('--outfile', help='file to write output to (default: stdout)')<|fim▁hole|> if args.version: print(sqlacodegen.version) return if not args.url: print('You must supply a url\n', file=sys.stderr) parser.print_help() return engine = create_engine(args.url) metadata = MetaData(engine) tables = args.tables.split(',') if args.tables else None metadata.reflect(engine, args.schema, not args.noviews, tables) outfile = 
codecs.open(args.outfile, 'w', encoding='utf-8') if args.outfile else sys.stdout generator = CodeGenerator(metadata, args.noindexes, args.noconstraints, args.nojoined, args.noinflect, args.noclasses, args.alwaysclasses, args.nosequences) generator.render(outfile)<|fim▁end|>
args = parser.parse_args()
<|file_name|>configurepanel.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------------- # Getting Things GNOME! - a personal organizer for the GNOME desktop # Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau # # This program is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program. If not, see <http://www.gnu.org/licenses/>. # ----------------------------------------------------------------------------- from gi.repository import Gtk from gettext import gettext as _ from GTG.gtk.backends.parameters_ui import ParametersUI from GTG.backends.backend_signals import BackendSignals class ConfigurePanel(Gtk.Box): """ A vertical Box that lets you configure a backend """ def __init__(self, backends): """ Constructor, creating all the gtk widgets @param backends: a reference to the dialog in which this is loaded """ super().__init__(orientation=Gtk.Orientation.VERTICAL) self.dialog = backends self.should_spinner_be_shown = False self.task_deleted_handle = None self.task_added_handle = None self.req = backends.get_requester() self._create_widgets() self._connect_signals() def _connect_signals(self): """ Connects the backends generated signals """ _signals = BackendSignals() _signals.connect(_signals.BACKEND_RENAMED, self.refresh_title) _signals.connect(_signals.BACKEND_STATE_TOGGLED, self.refresh_sync_status) _signals.connect(_signals.BACKEND_SYNC_STARTED, self.on_sync_started) 
_signals.connect(_signals.BACKEND_SYNC_ENDED, self.on_sync_ended) def _create_widgets(self): """ This function fills this box with widgets """ # Division of the available space in three segments: # top, middle and bottom (parameters_ui) top = Gtk.Box() top.set_spacing(6) middle = Gtk.Box() middle.set_spacing(6) self._fill_top_box(top) self._fill_middle_box(middle) self.pack_start(top, False, True, 0) self.pack_start(middle, False, True, 0) align = Gtk.Alignment.new(0, 0, 1, 0) align.set_padding(10, 0, 0, 0) self.parameters_ui = ParametersUI(self.req) align.add(self.parameters_ui) self.pack_start(align, False, True, 0) def _fill_top_box(self, box): """ Fill header with service's icon, name, and a spinner for inidcation of work. """ box.set_spacing(10) self.image_icon = Gtk.Image() self.image_icon.set_size_request(48, 48) self.human_name_label = Gtk.Label() self.human_name_label.set_alignment(xalign=0, yalign=0.5) # FIXME in the newer versions of GTK3 there always be Spinner! try: self.spinner = Gtk.Spinner() except AttributeError: # worarkound for archlinux: bug #624204 self.spinner = Gtk.Box() self.spinner.connect("show", self.on_spinner_show) self.spinner.set_size_request(32, 32) align_spin = Gtk.Alignment.new(1, 0, 0, 0) align_spin.add(self.spinner) box.pack_start(self.image_icon, False, True, 0) box.pack_start(self.human_name_label, True, True, 0) box.pack_start(align_spin, False, True, 0) def _fill_middle_box(self, box): """ Helper function to fill an box with a label and a button @param box: the Gtk.Box to fill """ self.sync_status_label = Gtk.Label() self.sync_status_label.set_alignment(xalign=0.8, yalign=0.5) self.sync_button = Gtk.Button() self.sync_button.connect("clicked", self.on_sync_button_clicked) box.pack_start(self.sync_status_label, True, True, 0) box.pack_start(self.sync_button, True, True, 0) def set_backend(self, backend_id): """Changes the backend to configure, refreshing this view. 
@param backend_id: the id of the backend to configure """ self.backend = self.dialog.get_requester().get_backend(backend_id) self.refresh_title() self.refresh_sync_status() self.parameters_ui.refresh(self.backend) self.image_icon.set_from_pixbuf(self.dialog.get_pixbuf_from_icon_name( self.backend.get_icon(), 48)) def refresh_title(self, sender=None, data=None): """ Callback for the signal that notifies backends name changes. It changes the title of this view @param sender: not used, here only for signal callback compatibility @param data: not used, here only for signal callback compatibility """ markup = "<big><big><big><b>%s</b></big></big></big>" % \ self.backend.get_human_name() self.human_name_label.set_markup(markup) def refresh_sync_button(self): """ Refreshes the state of the button that enables the backend """ self.sync_button.set_sensitive(not self.backend.is_default()) if self.backend.is_enabled(): label = _("Disable syncing") else: label = _("Enable syncing") self.sync_button.set_label(label) def refresh_sync_status_label(self): """ Refreshes the Gtk.Label that shows the current state of this backend """ if self.backend.is_default(): label = _("This is the default synchronization service") else: if self.backend.is_enabled(): label = _("Syncing is enabled.") else: label = _('Syncing is <span color="red">disabled</span>.') self.sync_status_label.set_markup(label) def refresh_sync_status(self, sender=False, data=False): """Signal callback function, called when a backend state (enabled/disabled) changes. Refreshes this view. 
@param sender: not used, here only for signal callback compatibility @param data: not used, here only for signal callback compatibility """ self.refresh_sync_button() self.refresh_sync_status_label() def on_sync_button_clicked(self, sender): """ Signal callback when a backend is enabled/disabled via the UI button @param sender: not used, here only for signal callback compatibility """ self.parameters_ui.commit_changes() self.req.set_backend_enabled(self.backend.get_id(), not self.backend.is_enabled()) def on_sync_started(self, sender, backend_id): """ If the backend has started syncing tasks, update the state of the Gtk.Spinner @param sender: not used, here only for signal callback compatibility @param backend_id: the id of the backend that emitted this signal """ if backend_id == self.backend.get_id(): self.spinner_set_active(True) def on_sync_ended(self, sender, backend_id): """ If the backend has stopped syncing tasks, update the state of the Gtk.Spinner @param sender: not used, here only for signal callback compatibility @param backend_id: the id of the backend that emitted this signal """ if backend_id == self.backend.get_id(): self.spinner_set_active(False) def on_spinner_show(self, sender): """This signal callback hides the spinner if it's not supposed to be seen. It's a workaround to let us call show_all on the whole window while keeping this hidden (it's the only widget that requires special attention) @param sender: not used, here only for signal callback compatibility """ if not self.should_spinner_be_shown:<|fim▁hole|> def spinner_set_active(self, active): """ Enables/disables the Gtk.Spinner, while showing/hiding it at the same time @param active: True if the spinner should spin """ self.should_spinner_be_shown = active if active: if isinstance(self.spinner, Gtk.Spinner): self.spinner.start() self.spinner.show() else: self.spinner.hide() if isinstance(self.spinner, Gtk.Spinner): self.spinner.stop()<|fim▁end|>
self.spinner.hide()
<|file_name|>powerConfigurator.py<|end_file_name|><|fim▁begin|>__author__ = 'mslabicki' import pygmo as pg # from pyltes.powerOptimizationProblemsDef import maximalThroughputProblemRR from pyltes.powerOptimizationProblemsDef import local_maximalThroughputProblemRR from pyltes.powerOptimizationProblemsDef import maximalMedianThrProblemRR from pyltes.powerOptimizationProblemsDef import local_maximalMedianThrProblemRR from pyltes.powerOptimizationProblemsDef import minInterQuartileRangeroblemRR from pyltes.powerOptimizationProblemsDef import local_minInterQuartileRangeroblemRR import copy import math import numpy as np class pygmoPowerConfigurator: def __init__(self,parent): <|fim▁hole|> if x_arg == None: x = self.parent.constraintAreaMaxX/2 else: x = x_arg if y_arg == None: y = self.parent.constraintAreaMaxY/2 else: y = y_arg if expectedSignalLoss_arg == None: maxDistance = min(self.parent.constraintAreaMaxX/2, self.parent.constraintAreaMaxY/2) else: maxDistance = returnDistanceFromSNR(expectedSignalLoss_arg) localBsVector = [] for bs in self.parent.bs: if math.sqrt((bs.x - x)**2 + (bs.y - y)**2) < maxDistance: row = [] row.append(int(bs.ID)) row.append(math.sqrt((bs.x - x)**2 + (bs.y - y)**2)) localBsVector.append(row) localBsVector = np.asarray(localBsVector) if objectiveFunction == "averageThr": if method == "local": localListBS = [] for i in range(len(localBsVector)): localListBS.append(localBsVector[i,0]) prob = pg.problem(local_maximalThroughputProblemRR(dim=len(localBsVector), networkInstance=self.parent, lowerTxLimit=self.parent.minTxPower, upperTxLimit=self.parent.maxTxPower, localListBS=localListBS)) if method == "global": prob = pg.problem(maximalThroughputProblemRR(dim=len(self.parent.bs), networkInstance=self.parent, lowerTxLimit=self.parent.minTxPower, upperTxLimit=self.parent.maxTxPower)) if objectiveFunction == "medianThr": if method == "local": localListBS = [] for i in range(len(localBsVector)): localListBS.append(localBsVector[i,0]) prob = 
pg.problem(local_maximalMedianThrProblemRR(dim=len(localBsVector), networkInstance=self.parent, lowerTxLimit=self.parent.minTxPower, upperTxLimit=self.parent.maxTxPower, localListBS=localListBS)) if method == "global": prob = pg.problem(maximalMedianThrProblemRR(dim=len(self.parent.bs), networkInstance=self.parent, lowerTxLimit=self.parent.minTxPower, upperTxLimit=self.parent.maxTxPower)) if objectiveFunction == "minIQRthr": if method == "local": localListBS = [] for i in range(len(localBsVector)): localListBS.append(localBsVector[i,0]) prob = pg.problem(local_minInterQuartileRangeroblemRR(dim=len(localBsVector), networkInstance=self.parent, lowerTxLimit=self.parent.minTxPower, upperTxLimit=self.parent.maxTxPower, localListBS=localListBS)) if method == "global": prob = pg.problem(minInterQuartileRangeroblemRR(dim=len(self.parent.bs), networkInstance=self.parent, lowerTxLimit=self.parent.minTxPower, upperTxLimit=self.parent.maxTxPower)) prob.siec = copy.deepcopy(self.parent) # algo = algorithm.sga(gen=sgaGenerations) algo = pg.algorithm(pg.sga(gen=sgaGenerations)) # archi = archipelago(algo, prob, numberOfThreads, numOfIndividuals, topology = topology.barabasi_albert()) # archi.evolve(evolveTimes) # archi.join() population = pg.population(prob, numOfIndividuals) population = algo.evolve(population) theBestCostF = 0 islandNumber = -1 islandCounter = 0 # for island in archi: # if theBestCostF > island.population.champion.f[0]: # theBestCostF = island.population.champion.f[0] # islandNumber = islandCounter # islandCounter = islandCounter + 1 if method == "global": for i in range(len(self.parent.bs)): self.parent.bs[i].outsidePower = population.champion_x[i] if method == "local": for i in range(len(localListBS)): # self.parent.bs[int(prob.bsList[i])].outsidePower = archi[islandNumber].population.champion.x[i] self.parent.bs[int(localListBS[i])].outsidePower = population.champion_x[i] return len(localBsVector) def returnDistanceFromSNR(expectedSignalLoss): lambda_val = 
0.142758313333 a = 4.0 b = 0.0065 c = 17.1 d = 10.8 s = 15.8 ht = 40 hr = 1.5 f = 2.1 gamma = a - b*ht + c/ht Xf = 6 * math.log10( f/2 ) Xh = -d * math.log10( hr/2 ) R0 = 100.0 R0p = R0 * pow(10.0,-( (Xf+Xh) / (10*gamma) )) bandwidth=20 k = 1.3806488 * math.pow(10, -23) T = 293.0 BW = bandwidth * 1000 * 1000 N = 10*math.log10(k*T) + 10*math.log10(BW) alpha = 20 * math.log10( (4*math.pi*R0p) / lambda_val ) R = R0 * math.pow(10, (expectedSignalLoss - alpha-Xf-Xh-s - N)/(10*gamma)) return R<|fim▁end|>
self.parent = parent def findPowersRR(self, objectiveFunction="averageThr", sgaGenerations = 100, numberOfThreads = 11, numOfIndividuals = 10, evolveTimes = 10, method="global", x_arg=None, y_arg=None, expectedSignalLoss_arg=None): if method == "local":
<|file_name|>validator.go<|end_file_name|><|fim▁begin|>/* Copyright IBM Corp. 2016 All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package txvalidator import ( "fmt" "github.com/golang/protobuf/proto" "github.com/hyperledger/fabric/common/configtx" coreUtil "github.com/hyperledger/fabric/common/util" "github.com/hyperledger/fabric/core/chaincode/shim"<|fim▁hole|> "github.com/hyperledger/fabric/msp" "github.com/hyperledger/fabric/protos/common" "github.com/hyperledger/fabric/protos/utils" "github.com/op/go-logging" ) // Support provides all of the needed to evaluate the VSCC type Support interface { // Ledger returns the ledger associated with this validator Ledger() ledger.PeerLedger // MSPManager returns the MSP manager for this chain MSPManager() msp.MSPManager // Apply attempts to apply a configtx to become the new config Apply(configtx *common.ConfigEnvelope) error } //Validator interface which defines API to validate block transactions // and return the bit array mask indicating invalid transactions which // didn't pass validation. 
type Validator interface { Validate(block *common.Block) error } // private interface to decouple tx validator // and vscc execution, in order to increase // testability of txValidator type vsccValidator interface { VSCCValidateTx(payload *common.Payload, envBytes []byte) error } // vsccValidator implementation which used to call // vscc chaincode and validate block transactions type vsccValidatorImpl struct { support Support ccprovider ccprovider.ChaincodeProvider } // implementation of Validator interface, keeps // reference to the ledger to enable tx simulation // and execution of vscc type txValidator struct { support Support vscc vsccValidator } var logger *logging.Logger // package-level logger func init() { // Init logger with module name logger = logging.MustGetLogger("txvalidator") } // NewTxValidator creates new transactions validator func NewTxValidator(support Support) Validator { // Encapsulates interface implementation return &txValidator{support, &vsccValidatorImpl{support: support, ccprovider: ccprovider.GetChaincodeProvider()}} } func (v *txValidator) chainExists(chain string) bool { // TODO: implement this function! return true } func (v *txValidator) Validate(block *common.Block) error { logger.Debug("START Block Validation") defer logger.Debug("END Block Validation") txsfltr := ledgerUtil.NewFilterBitArray(uint(len(block.Data.Data))) for tIdx, d := range block.Data.Data { // Start by marking transaction as invalid, before // doing any validation checks. txsfltr.Set(uint(tIdx)) if d != nil { if env, err := utils.GetEnvelopeFromBlock(d); err != nil { logger.Warningf("Error getting tx from block(%s)", err) } else if env != nil { // validate the transaction: here we check that the transaction // is properly formed, properly signed and that the security // chain binding proposal to endorsements to tx holds. We do // NOT check the validity of endorsements, though. 
That's a // job for VSCC below logger.Debug("Validating transaction peer.ValidateTransaction()") var payload *common.Payload var err error if payload, err = validation.ValidateTransaction(env); err != nil { logger.Errorf("Invalid transaction with index %d, error %s", tIdx, err) continue } chain := payload.Header.ChannelHeader.ChannelId logger.Debug("Transaction is for chain %s", chain) if !v.chainExists(chain) { logger.Errorf("Dropping transaction for non-existent chain %s", chain) continue } if common.HeaderType(payload.Header.ChannelHeader.Type) == common.HeaderType_ENDORSER_TRANSACTION { // Check duplicate transactions txID := payload.Header.ChannelHeader.TxId if _, err := v.support.Ledger().GetTransactionByID(txID); err == nil { logger.Warning("Duplicate transaction found, ", txID, ", skipping") continue } //the payload is used to get headers logger.Debug("Validating transaction vscc tx validate") if err = v.vscc.VSCCValidateTx(payload, d); err != nil { txID := txID logger.Errorf("VSCCValidateTx for transaction txId = %s returned error %s", txID, err) continue } } else if common.HeaderType(payload.Header.ChannelHeader.Type) == common.HeaderType_CONFIGURATION_TRANSACTION { configEnvelope, err := configtx.UnmarshalConfigEnvelope(payload.Data) if err != nil { err := fmt.Errorf("Error unmarshaling config which passed initial validity checks: %s", err) logger.Critical(err) return err } if err := v.support.Apply(configEnvelope); err != nil { err := fmt.Errorf("Error validating config which passed initial validity checks: %s", err) logger.Critical(err) return err } logger.Debugf("config transaction received for chain %s", chain) } if _, err := proto.Marshal(env); err != nil { logger.Warningf("Cannot marshal transaction due to %s", err) continue } // Succeeded to pass down here, transaction is valid, // just unset the filter bit array flag. 
txsfltr.Unset(uint(tIdx)) } else { logger.Warning("Nil tx from block") } } } // Initialize metadata structure utils.InitBlockMetadata(block) // Serialize invalid transaction bit array into block metadata field block.Metadata.Metadata[common.BlockMetadataIndex_TRANSACTIONS_FILTER] = txsfltr.ToBytes() return nil } func (v *vsccValidatorImpl) VSCCValidateTx(payload *common.Payload, envBytes []byte) error { // Chain ID chainID := payload.Header.ChannelHeader.ChannelId if chainID == "" { err := fmt.Errorf("transaction header does not contain an chain ID") logger.Errorf("%s", err) return err } // Get transaction id txid := payload.Header.ChannelHeader.TxId logger.Info("[XXX remove me XXX] Transaction type,", common.HeaderType(payload.Header.ChannelHeader.Type)) if txid == "" { err := fmt.Errorf("transaction header does not contain transaction ID") logger.Errorf("%s", err) return err } ctxt, err := v.ccprovider.GetContext(v.support.Ledger()) if err != nil { logger.Errorf("Cannot obtain context for txid=%s, err %s", txid, err) return err } defer v.ccprovider.ReleaseContext() // get header extensions so we have the visibility field hdrExt, err := utils.GetChaincodeHeaderExtension(payload.Header) if err != nil { return err } // LCCC should not undergo standard VSCC type of // validation. 
It should instead go through system // policy validation to determine whether the issuer // is entitled to deploy a chaincode on our chain // VSCCValidateTx should if hdrExt.ChaincodeId.Name == "lccc" { // TODO: until FAB-1934 is in, we need to stop here logger.Infof("Invocation of LCCC detected, no further VSCC validation necessary") return nil } // obtain name of the VSCC and the policy from LCCC vscc, policy, err := v.ccprovider.GetCCValidationInfoFromLCCC(ctxt, txid, nil, chainID, hdrExt.ChaincodeId.Name) if err != nil { logger.Errorf("Unable to get chaincode data from LCCC for txid %s, due to %s", txid, err) return err } // build arguments for VSCC invocation // args[0] - function name (not used now) // args[1] - serialized Envelope // args[2] - serialized policy args := [][]byte{[]byte(""), envBytes, policy} vscctxid := coreUtil.GenerateUUID() // Get chaincode version version := coreUtil.GetSysCCVersion() cccid := v.ccprovider.GetCCContext(chainID, vscc, version, vscctxid, true, nil) // invoke VSCC logger.Info("Invoking VSCC txid", txid, "chaindID", chainID) res, _, err := v.ccprovider.ExecuteChaincode(ctxt, cccid, args) if err != nil { logger.Errorf("Invoke VSCC failed for transaction txid=%s, error %s", txid, err) return err } if res.Status != shim.OK { logger.Errorf("VSCC check failed for transaction txid=%s, error %s", txid, res.Message) return fmt.Errorf("%s", res.Message) } return nil }<|fim▁end|>
"github.com/hyperledger/fabric/core/common/ccprovider" "github.com/hyperledger/fabric/core/common/validation" "github.com/hyperledger/fabric/core/ledger" ledgerUtil "github.com/hyperledger/fabric/core/ledger/util"
<|file_name|>jscouch.documents.js<|end_file_name|><|fim▁begin|>/* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Initial implementation: * http://www.mudynamics.com * http://labs.mudynamics.com * http://www.pcapr.net */ (function($) { var now = new Date().getTime(); var millisInHHour = 1000*60*30; $.jscouch = $.jscouch || {}; $.jscouch.documents = $.jscouch.documents || {}; $.extend($.jscouch.documents, { load: function() { // popluate the DB with initial entries $.jscouch.couchdb.put({ name: 'fish.jpg', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(), user: 'bob', type: 'jpeg', camera: 'nikon', info: { width: 100, height: 200, size: 12345 }, tags: [ 'tuna', 'shark' ] }); $.jscouch.couchdb.put({ name: 'trees.jpg', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(), user: 'john', type: 'jpeg', camera: 'canon', info: { width: 30, height: 250, <|fim▁hole|> }, tags: [ 'oak' ] }); $.jscouch.couchdb.put({ name: 'snow.png', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(), user: 'john', type: 'png', camera: 'canon', info: { width: 64, height: 64, size: 1253 }, tags: [ 'tahoe', 'powder' ] }); $.jscouch.couchdb.put({ name: 'hawaii.png', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(), user: 'john', type: 'png', camera: 'nikon', info: { width: 128, height: 64, size: 92834 }, tags: [ 'maui', 'tuna' ] }); $.jscouch.couchdb.put({ name: 'hawaii.gif', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(), user: 'bob', 
type: 'gif', camera: 'canon', info: { width: 320, height: 128, size: 49287 }, tags: [ 'maui' ] }); $.jscouch.couchdb.put({ name: 'island.gif', created_at: new Date(now + millisInHHour*Math.random()).toUTCString(), user: 'zztop', type: 'gif', camera: 'nikon', info: { width: 640, height: 480, size: 50398 }, tags: [ 'maui' ] }); } }); })(jQuery);<|fim▁end|>
size: 32091
<|file_name|>CarEntityTests.java<|end_file_name|><|fim▁begin|>package org.softuni.mostwanted.entity; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.softuni.mostwanted.util.ReflectionUtil; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import javax.persistence.Column; import javax.persistence.Id; import javax.persistence.ManyToOne; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Arrays; @RunWith(SpringJUnit4ClassRunner.class) public class CarEntityTests { private static final String CLASS_NAME = "Car"; private Class<?> entityClass; @Before public void setUp() { entityClass = new ReflectionUtil().getTestClass(CLASS_NAME); } @Test public void carEntity_ShouldHaveIdAnnotation() { Field[] entityFields = entityClass.getDeclaredFields(); Method[] entityMethods = entityClass.getDeclaredMethods(); boolean hasId = Arrays.stream(entityFields) .anyMatch(ef -> ef.isAnnotationPresent(Id.class) && ef.getName().equals("id")) || Arrays.stream(entityMethods) .anyMatch(em -> em.isAnnotationPresent(Id.class) && em.getName().equals("getId")); Assert.assertTrue("No Id annotation detected on " + CLASS_NAME + " binding.", hasId); } @Test public void carEntity_BrandFieldShouldBeRequired() { Field[] entityFields = entityClass.getDeclaredFields(); Method[] entityMethods = entityClass.getDeclaredMethods(); String fieldName = "brand"; String getterName = "get" + (fieldName.charAt(0) + "").toUpperCase() + fieldName.substring(1); boolean hasNameRequired = Arrays.stream(entityFields) .anyMatch(ef -> ef.isAnnotationPresent(Column.class) && !ef.getAnnotation(Column.class).nullable() && ef.getName().equals(fieldName)) || Arrays.stream(entityMethods) .anyMatch(em -> em.isAnnotationPresent(Column.class) && !em.getAnnotation(Column.class).nullable() && em.getName().equals(getterName)); Assert.assertTrue("No Required configuration on \"" + fieldName + "\" field detected in " + 
CLASS_NAME + " binding.", hasNameRequired); } @Test public void carEntity_ModelFieldShouldBeRequired() { Field[] entityFields = entityClass.getDeclaredFields(); Method[] entityMethods = entityClass.getDeclaredMethods(); String fieldName = "model"; String getterName = "get" + (fieldName.charAt(0) + "").toUpperCase() + fieldName.substring(1); boolean hasNameRequired = Arrays.stream(entityFields) .anyMatch(ef -> ef.isAnnotationPresent(Column.class) && !ef.getAnnotation(Column.class).nullable() && ef.getName().equals(fieldName)) || Arrays.stream(entityMethods) .anyMatch(em -> em.isAnnotationPresent(Column.class) && !em.getAnnotation(Column.class).nullable() && em.getName().equals(getterName)); Assert.assertTrue("No Required configuration on \"" + fieldName + "\" field detected in " + CLASS_NAME + " binding.", hasNameRequired); } @Test public void carEntity_YearOfProductionFieldShouldBeRequired() { Field[] entityFields = entityClass.getDeclaredFields(); Method[] entityMethods = entityClass.getDeclaredMethods(); String fieldName = "yearOfProduction"; String getterName = "get" + (fieldName.charAt(0) + "").toUpperCase() + fieldName.substring(1); boolean hasNameRequired = Arrays.stream(entityFields) .anyMatch(ef -> ef.isAnnotationPresent(Column.class) && !ef.getAnnotation(Column.class).nullable() && ef.getName().equals(fieldName))<|fim▁hole|> Arrays.stream(entityMethods) .anyMatch(em -> em.isAnnotationPresent(Column.class) && !em.getAnnotation(Column.class).nullable() && em.getName().equals(getterName)); Assert.assertTrue("No Required configuration on \"" + fieldName + "\" field detected in " + CLASS_NAME + " binding.", hasNameRequired); } @Test public void carEntity_RacerFieldShouldHaveRelationshipAnnotation() { Field[] entityFields = entityClass.getDeclaredFields(); Method[] entityMethods = entityClass.getDeclaredMethods(); String fieldName = "racer"; String getterName = "get" + (fieldName.charAt(0) + "").toUpperCase() + fieldName.substring(1); Class relationshipClass = 
ManyToOne.class; boolean hasNameRequired = Arrays.stream(entityFields) .anyMatch(ef -> ef.isAnnotationPresent(relationshipClass) && ef.getName().equals(fieldName)) || Arrays.stream(entityMethods) .anyMatch(em -> em.isAnnotationPresent(relationshipClass) && em.getName().equals(getterName)); Assert.assertTrue("No Relationship configuration on \"" + fieldName + "\" field detected in " + CLASS_NAME + " binding.", hasNameRequired); } }<|fim▁end|>
||
<|file_name|>ui.go<|end_file_name|><|fim▁begin|>package ui import ( "image" "image/color" "image/draw" ) // UI represents an instance of the UI type UI struct { component WindowTitle string Input Input keyFuncs map[Key]func() error prevX, prevY int } // New creates a new UI instance func New(width, height int) *UI { rect := image.Rect(0, 0, width, height) ui := UI{ WindowTitle: "ui window", keyFuncs: make(map[Key]func() error), } ui.Dimension = Dimension{Width: width, Height: height} ui.Image = image.NewRGBA(rect) return &ui } // AddKeyFunc registers a function to run on key press func (ui *UI) AddKeyFunc(key Key, fnc func() error) { ui.keyFuncs[key] = fnc } // Update is called on every frame from the ebiten.Run update callback func (ui *UI) Update() error { ui.Input.updateMouse() ui.handleClick() if err := ui.handleKeypress(); err != nil { return err } return nil } // SetWindowTitle sets the title of the application window<|fim▁hole|> ui.WindowTitle = s } // AddComponent adds a component to the ui func (ui *UI) AddComponent(o Component) { ui.addChild(o) } // Render returns a fresh frame of the GUI. 
mx, my is absolute mouse position func (ui *UI) Render() image.Image { mx := ui.Input.X my := ui.Input.Y if ui.isChildrenClean() { if mx == ui.prevX && my == ui.prevY { return ui.Image } if (mx < 0 || mx > ui.Dimension.Width) || (my < 0 || my > ui.Dimension.Height) { // cursor outside window will not change hover state return ui.Image } } ui.prevX = mx ui.prevY = my whole := image.Rect(0, 0, ui.Dimension.Width, ui.Dimension.Height) draw.Draw(ui.Image, whole, &image.Uniform{color.Transparent}, image.ZP, draw.Src) ui.drawChildren(mx, my) ui.drawTooltips(mx, my) return ui.Image } // mx, my is absolute mouse position func (ui *UI) drawTooltips(mx, my int) { for _, child := range ui.children { if grp, ok := child.(*Group); ok { for _, gchild := range grp.children { ui.drawTooltip(gchild, mx, my, mx-grp.Position.X, my-grp.Position.Y) } } ui.drawTooltip(child, mx, my, mx, my) } } func (ui *UI) drawTooltip(child Component, mx, my, relx, rely int) { r := child.GetBounds() child.Hover(relx >= r.Min.X && relx <= r.Max.X && rely >= r.Min.Y && rely <= r.Max.Y) tooltip := child.Tooltip() if child.IsMouseOver() && tooltip != nil { tooltip.Move(mx, my) tr := tooltip.GetBounds() draw.Draw(ui.Image, tr, tooltip.Draw(relx, rely), image.ZP, draw.Over) } } // IsClean returns true if all UI components are clean func (ui *UI) IsClean() bool { for _, c := range ui.children { if !c.IsClean() { return false } } return true } // handleKeypress runs corresponding function when a key is pressed func (ui *UI) handleKeypress() error { ui.Input.updateKeyboard() for key, fnc := range ui.keyFuncs { if ui.Input.StateForKey(key) { if err := fnc(); err != nil { return err } } } return nil }<|fim▁end|>
func (ui *UI) SetWindowTitle(s string) {
<|file_name|>backend.rs<|end_file_name|><|fim▁begin|>//! Abstraction around the object writing crate use std::convert::{TryFrom, TryInto}; use rustc_data_structures::fx::FxHashMap; use rustc_session::Session; use cranelift_codegen::isa::TargetIsa; use cranelift_module::FuncId; use cranelift_object::{ObjectBuilder, ObjectModule, ObjectProduct}; use object::write::*; use object::{RelocationEncoding, SectionKind, SymbolFlags}; use gimli::SectionId; use crate::debuginfo::{DebugReloc, DebugRelocName}; pub(crate) trait WriteMetadata { fn add_rustc_section(&mut self, symbol_name: String, data: Vec<u8>); } impl WriteMetadata for object::write::Object { fn add_rustc_section(&mut self, symbol_name: String, data: Vec<u8>) { let segment = self.segment_name(object::write::StandardSegment::Data).to_vec(); let section_id = self.add_section(segment, b".rustc".to_vec(), object::SectionKind::Data); let offset = self.append_section_data(section_id, &data, 1); // For MachO and probably PE this is necessary to prevent the linker from throwing away the // .rustc section. For ELF this isn't necessary, but it also doesn't harm. 
self.add_symbol(object::write::Symbol { name: symbol_name.into_bytes(), value: offset, size: data.len() as u64, kind: object::SymbolKind::Data, scope: object::SymbolScope::Dynamic, weak: false, section: SymbolSection::Section(section_id), flags: SymbolFlags::None, }); } } pub(crate) trait WriteDebugInfo { type SectionId: Copy; fn add_debug_section(&mut self, name: SectionId, data: Vec<u8>) -> Self::SectionId; fn add_debug_reloc( &mut self, section_map: &FxHashMap<SectionId, Self::SectionId>, from: &Self::SectionId, reloc: &DebugReloc, ); } impl WriteDebugInfo for ObjectProduct { type SectionId = (object::write::SectionId, object::write::SymbolId); fn add_debug_section( &mut self, id: SectionId, data: Vec<u8>, ) -> (object::write::SectionId, object::write::SymbolId) { let name = if self.object.format() == object::BinaryFormat::MachO { id.name().replace('.', "__") // machO expects __debug_info instead of .debug_info } else { id.name().to_string() } .into_bytes(); let segment = self.object.segment_name(StandardSegment::Debug).to_vec(); // FIXME use SHT_X86_64_UNWIND for .eh_frame let section_id = self.object.add_section( segment, name, if id == SectionId::EhFrame { SectionKind::ReadOnlyData } else { SectionKind::Debug }, ); self.object .section_mut(section_id) .set_data(data, if id == SectionId::EhFrame { 8 } else { 1 }); let symbol_id = self.object.section_symbol(section_id); (section_id, symbol_id) } fn add_debug_reloc( &mut self, section_map: &FxHashMap<SectionId, Self::SectionId>, from: &Self::SectionId, reloc: &DebugReloc,<|fim▁hole|> DebugRelocName::Symbol(id) => { let symbol_id = self.function_symbol(FuncId::from_u32(id.try_into().unwrap())); self.object .symbol_section_and_offset(symbol_id) .expect("Debug reloc for undef sym???") } }; self.object .add_relocation( from.0, Relocation { offset: u64::from(reloc.offset), symbol, kind: reloc.kind, encoding: RelocationEncoding::Generic, size: reloc.size * 8, addend: i64::try_from(symbol_offset).unwrap() + 
reloc.addend, }, ) .unwrap(); } } pub(crate) fn with_object(sess: &Session, name: &str, f: impl FnOnce(&mut Object)) -> Vec<u8> { let triple = crate::target_triple(sess); let binary_format = match triple.binary_format { target_lexicon::BinaryFormat::Elf => object::BinaryFormat::Elf, target_lexicon::BinaryFormat::Coff => object::BinaryFormat::Coff, target_lexicon::BinaryFormat::Macho => object::BinaryFormat::MachO, binary_format => sess.fatal(&format!("binary format {} is unsupported", binary_format)), }; let architecture = match triple.architecture { target_lexicon::Architecture::X86_32(_) => object::Architecture::I386, target_lexicon::Architecture::X86_64 => object::Architecture::X86_64, target_lexicon::Architecture::Arm(_) => object::Architecture::Arm, target_lexicon::Architecture::Aarch64(_) => object::Architecture::Aarch64, architecture => { sess.fatal(&format!("target architecture {:?} is unsupported", architecture,)) } }; let endian = match triple.endianness().unwrap() { target_lexicon::Endianness::Little => object::Endianness::Little, target_lexicon::Endianness::Big => object::Endianness::Big, }; let mut metadata_object = object::write::Object::new(binary_format, architecture, endian); metadata_object.add_file_symbol(name.as_bytes().to_vec()); f(&mut metadata_object); metadata_object.write().unwrap() } pub(crate) fn make_module(sess: &Session, isa: Box<dyn TargetIsa>, name: String) -> ObjectModule { let mut builder = ObjectBuilder::new(isa, name + ".o", cranelift_module::default_libcall_names()).unwrap(); // Unlike cg_llvm, cg_clif defaults to disabling -Zfunction-sections. For cg_llvm binary size // is important, while cg_clif cares more about compilation times. Enabling -Zfunction-sections // can easily double the amount of time necessary to perform linking. builder.per_function_section(sess.opts.debugging_opts.function_sections.unwrap_or(false)); ObjectModule::new(builder) }<|fim▁end|>
) { let (symbol, symbol_offset) = match reloc.name { DebugRelocName::Section(id) => (section_map.get(&id).unwrap().1, 0),
<|file_name|>fmt.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { // pub trait FixedSizeArray<T> { // /// Converts the array to immutable slice // fn as_slice(&self) -> &[T]; // /// Converts the array to mutable slice // fn as_mut_slice(&mut self) -> &mut [T]; // } // macro_rules! array_impls { // ($($N:expr)+) => { // $( // #[unstable(feature = "core")] // impl<T> FixedSizeArray<T> for [T; $N] { // #[inline] // fn as_slice(&self) -> &[T] { // &self[..] // } // #[inline] // fn as_mut_slice(&mut self) -> &mut [T] { // &mut self[..] // } // } // // #[unstable(feature = "array_as_ref", // reason = "should ideally be implemented for all fixed-sized arrays")] // impl<T> AsRef<[T]> for [T; $N] { // #[inline] // fn as_ref(&self) -> &[T] { // &self[..] // } // } // // #[unstable(feature = "array_as_ref", // reason = "should ideally be implemented for all fixed-sized arrays")] // impl<T> AsMut<[T]> for [T; $N] { // #[inline] // fn as_mut(&mut self) -> &mut [T] { // &mut self[..] 
// } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Copy> Clone for [T; $N] { // fn clone(&self) -> [T; $N] { // *self // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T: Hash> Hash for [T; $N] { // fn hash<H: hash::Hasher>(&self, state: &mut H) { // Hash::hash(&self[..], state) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T: fmt::Debug> fmt::Debug for [T; $N] { // fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // fmt::Debug::fmt(&&self[..], f) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> IntoIterator for &'a [T; $N] { // type Item = &'a T; // type IntoIter = Iter<'a, T>; // // fn into_iter(self) -> Iter<'a, T> { // self.iter() // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> IntoIterator for &'a mut [T; $N] { // type Item = &'a mut T; // type IntoIter = IterMut<'a, T>; // // fn into_iter(self) -> IterMut<'a, T> { // self.iter_mut() // } // } // // // NOTE: some less important impls are omitted to reduce code bloat // __impl_slice_eq1! { [A; $N], [B; $N] } // __impl_slice_eq2! { [A; $N], [B] } // __impl_slice_eq2! { [A; $N], &'b [B] } // __impl_slice_eq2! { [A; $N], &'b mut [B] } // // __impl_slice_eq2! { [A; $N], &'b [B; $N] } // // __impl_slice_eq2! 
{ [A; $N], &'b mut [B; $N] } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Eq> Eq for [T; $N] { } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:PartialOrd> PartialOrd for [T; $N] { // #[inline] // fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> { // PartialOrd::partial_cmp(&&self[..], &&other[..]) // } // #[inline] // fn lt(&self, other: &[T; $N]) -> bool { // PartialOrd::lt(&&self[..], &&other[..]) // } // #[inline] // fn le(&self, other: &[T; $N]) -> bool { // PartialOrd::le(&&self[..], &&other[..]) // } // #[inline] // fn ge(&self, other: &[T; $N]) -> bool { // PartialOrd::ge(&&self[..], &&other[..]) // } // #[inline] // fn gt(&self, other: &[T; $N]) -> bool { // PartialOrd::gt(&&self[..], &&other[..]) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Ord> Ord for [T; $N] { // #[inline] // fn cmp(&self, other: &[T; $N]) -> Ordering { // Ord::cmp(&&self[..], &&other[..]) // } // } // )+ // } // } // array_impls! { // 0 1 2 3 4 5 6 7 8 9 // 10 11 12 13 14 15 16 17 18 19 // 20 21 22 23 24 25 26 27 28 29 // 30 31 32 // } // impl<T: fmt::Debug> fmt::Debug for [T; $N] { // fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // fmt::Debug::fmt(&&self[..], f)<|fim▁hole|> type T = i32; #[test] fn fmt_test1() { let array: [T; 3] = [ 0, 1, 2 ]; let message: String = format!("{:?}", array); assert_eq!(message, "[0, 1, 2]".to_string()); } }<|fim▁end|>
// } // }
<|file_name|>issue-18400.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. trait Set<T> { fn contains(&self, T) -> bool; fn set(&mut self, T); } impl<'a, T, S> Set<&'a [T]> for S where T: Copy, S: Set<T>, { fn contains(&self, bits: &[T]) -> bool { bits.iter().all(|&bit| self.contains(bit)) } <|fim▁hole|> self.set(bit) } } } fn main() { let bits: &[_] = &[0, 1]; 0.contains(bits); //~^ ERROR the trait `Set<_>` is not implemented for the type `_` }<|fim▁end|>
fn set(&mut self, bits: &[T]) { for &bit in bits.iter() {
<|file_name|>builddeb.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys try: import py2deb except ImportError: import fake_py2deb as py2deb import constants __app_name__ = constants.__app_name__ __description__ = """Very simple Audiobook player. Supports playing, pausing, seeking (sort of) and saving state when changing book/closing. Plays books arranged as dirs under myDocs/Audiobooks . Homepage: http://wiki.maemo.org/Nqaap""" __author__ = "Soeren 'Pengman' Pedersen" __email__ = "[email protected]" __version__ = constants.__version__ __build__ = constants.__build__ __changelog__ = """ * More unicode improvements """.strip() __postinstall__ = """#!/bin/sh -e gtk-update-icon-cache -f /usr/share/icons/hicolor rm -f ~/.%(name)s/%(name)s.log """ % {"name": constants.__app_name__} def find_files(prefix, path): for root, dirs, files in os.walk(path): for file in files: if file.startswith(prefix+"-"): fileParts = file.split("-") unused, relPathParts, newName = fileParts[0], fileParts[1:-1], fileParts[-1] assert unused == prefix relPath = os.sep.join(relPathParts) yield relPath, file, newName def unflatten_files(files): d = {} for relPath, oldName, newName in files: if relPath not in d: d[relPath] = [] d[relPath].append((oldName, newName)) return d def build_package(distribution): try: os.chdir(os.path.dirname(sys.argv[0])) except: pass py2deb.Py2deb.SECTIONS = py2deb.SECTIONS_BY_POLICY[distribution] p = py2deb.Py2deb(__app_name__) p.prettyName = constants.__pretty_app_name__ p.description = __description__ p.bugTracker="https://bugs.maemo.org/enter_bug.cgi?product=nQa%%20Audiobook%%20Player" p.author = __author__ p.mail = __email__ p.license = "lgpl" p.depends = ", ".join([ "python2.6 | python2.5", "python-gtk2 | python2.5-gtk2", "python-dbus | python2.5-dbus", "python-telepathy | python2.5-telepathy", "python-gobject | python2.5-gobject", "python-simplejson", ]) maemoSpecificDepends = ", python-osso | python2.5-osso, python-hildon | 
python2.5-hildon" p.depends += { "debian": ", python-gst0.10", "diablo": maemoSpecificDepends, "fremantle": maemoSpecificDepends + ", python-gst0.10", }[distribution] p.section = { "debian": "sound", "diablo": "user/multimedia", "fremantle": "user/multimedia", }[distribution] p.arch="all" p.urgency="low" p.distribution=distribution p.repository="extras" p.changelog = __changelog__ p.postinstall = __postinstall__ p.icon = { "debian": "26x26-%s.png" % constants.__app_name__, "diablo": "26x26-%s.png" % constants.__app_name__, "fremantle": "48x48-%s.png" % constants.__app_name__, }[distribution] p["/opt/%s/bin" % constants.__app_name__] = [ "%s.py" % constants.__app_name__ ] for relPath, files in unflatten_files(find_files("src", ".")).iteritems(): fullPath = "/opt/%s/lib" % constants.__app_name__ if relPath: fullPath += os.sep+relPath p[fullPath] = list( "|".join((oldName, newName)) for (oldName, newName) in files ) p["/usr/share/applications/hildon"] = ["%s.desktop" % constants.__app_name__] p["/usr/share/icons/hicolor/26x26/hildon"] = ["26x26-%s.png|%s.png" % (constants.__app_name__, constants.__app_name__)] p["/usr/share/icons/hicolor/48x48/hildon"] = ["48x48-%s.png|%s.png" % (constants.__app_name__, constants.__app_name__)] p["/usr/share/icons/hicolor/64x64/hildon"] = ["64x64-%s.png|%s.png" % (constants.__app_name__, constants.__app_name__)] p["/usr/share/icons/hicolor/scalable/hildon"] = ["scale-%s.png|%s.png" % (constants.__app_name__, constants.__app_name__)] print p if distribution == "debian": print p.generate( version="%s-%s" % (__version__, __build__), changelog=__changelog__, build=True, tar=False, changes=False, dsc=False, ) else: print p.generate( version="%s-%s" % (__version__, __build__), changelog=__changelog__, build=False, tar=True, changes=True, dsc=True, ) print "Building for %s finished" % distribution if __name__ == "__main__": <|fim▁hole|> if len(sys.argv) == 1: distribution = "fremantle" else: distribution = sys.argv[1] 
build_package(distribution)<|fim▁end|>
<|file_name|>clusterrole_handler.go<|end_file_name|><|fim▁begin|>package rbac import ( v3 "github.com/rancher/rancher/pkg/generated/norman/management.cattle.io/v3" typesrbacv1 "github.com/rancher/rancher/pkg/generated/norman/rbac.authorization.k8s.io/v1" rbacv1 "k8s.io/api/rbac/v1" apierrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" ) type crHandler struct { clusterRoles typesrbacv1.ClusterRoleInterface roleTemplateLister v3.RoleTemplateLister } func newClusterRoleHandler(r *manager) *crHandler { return &crHandler{ clusterRoles: r.clusterRoles, roleTemplateLister: r.rtLister,<|fim▁hole|>// sync validates that a clusterRole's parent roleTemplate still exists in management // and will remove the clusterRole if the roleTemplate no longer exists. func (c *crHandler) sync(key string, obj *rbacv1.ClusterRole) (runtime.Object, error) { if key == "" || obj == nil { return nil, nil } if owner, ok := obj.Annotations[clusterRoleOwner]; ok { _, err := c.roleTemplateLister.Get("", owner) if err != nil { if apierrors.IsNotFound(err) { return obj, c.clusterRoles.Delete(obj.Name, &metav1.DeleteOptions{}) } return obj, err } } return obj, nil }<|fim▁end|>
} }
<|file_name|>vite-plugin-react-svg-tests.ts<|end_file_name|><|fim▁begin|>import { PluginOption } from 'vite'; import reactSvgPlugin = require('vite-plugin-react-svg'); const testCases: Array<PluginOption | PluginOption[]> = [<|fim▁hole|> expandProps: 'start', memo: false, ref: false, replaceAttrValues: { someKey: 42, anotherKey: 'value', }, svgProps: { role: 'alert', }, svgo: true, svgoConfig: { datauri: 'base64', }, titleProp: true, }), reactSvgPlugin({ defaultExport: 'url', expandProps: 'end', }), reactSvgPlugin({ expandProps: false, }), reactSvgPlugin({ // $ExpectError expandProps: true, }), reactSvgPlugin({ // $ExpectError expandProps: 'en', }), ];<|fim▁end|>
reactSvgPlugin({ defaultExport: 'component',
<|file_name|>test_guid_auto_include.py<|end_file_name|><|fim▁begin|>from django.utils import timezone import pytest from django_bulk_update.helper import bulk_update from django.db.models import DateTimeField from osf_tests.factories import UserFactory, PreprintFactory, NodeFactory @pytest.mark.django_db class TestGuidAutoInclude: guid_factories = [ UserFactory, PreprintFactory, NodeFactory ] @pytest.mark.parametrize('Factory', guid_factories) def test_filter_object(self, Factory): obj = Factory() assert '__guids' in str(obj._meta.model.objects.filter(id=obj.id).query), 'Guids were not included in filter query for {}'.format(obj._meta.model.__name__) @pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries def test_all(self, Factory, django_assert_num_queries): for _ in range(0, 5): UserFactory() with django_assert_num_queries(1): wut = Factory._meta.model.objects.all() for x in wut: assert x._id is not None, 'Guid was None' @pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries def test_filter(self, Factory, django_assert_num_queries): objects = [] for _ in range(0, 5): objects.append(Factory()) new_ids = [o.id for o in objects] with django_assert_num_queries(1): wut = Factory._meta.model.objects.filter(id__in=new_ids) for x in wut: assert x._id is not None, 'Guid was None' @pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries def test_filter_order_by(self, Factory, django_assert_num_queries): objects = [] for _ in range(0, 5): objects.append(Factory()) new_ids = [o.id for o in objects] with django_assert_num_queries(1): wut = Factory._meta.model.objects.filter(id__in=new_ids).order_by('id') for x in wut: assert x._id is not None, 'Guid was None' @pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries def test_values(self, Factory, django_assert_num_queries): objects = [] for _ in range(0, 5): objects.append(Factory()) with 
django_assert_num_queries(1): wut = Factory._meta.model.objects.values('id') for x in wut: assert len(x) == 1, 'Too many keys in values' @pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries def test_exclude(self, Factory, django_assert_num_queries): objects = [] for _ in range(0, 5): objects.append(Factory()) try: dtfield = [x.name for x in objects[0]._meta.get_fields() if isinstance(x, DateTimeField)][0] except IndexError: pytest.skip('Thing doesn\'t have a DateTimeField') with django_assert_num_queries(1): wut = Factory._meta.model.objects.exclude(**{dtfield: timezone.now()}) for x in wut: assert x._id is not None, 'Guid was None' @pytest.mark.parametrize('Factory', guid_factories) def test_update_objects(self, Factory): objects = [] for _ in range(0, 5): objects.append(Factory()) new_ids = [o.id for o in objects] try: dtfield = [x.name for x in objects[0]._meta.get_fields() if isinstance(x, DateTimeField)][0] except IndexError: pytest.skip('Thing doesn\'t have a DateTimeField') qs = objects[0]._meta.model.objects.filter(id__in=new_ids) assert len(qs) > 0, 'No results returned' try: qs.update(**{dtfield: timezone.now()}) except Exception as ex: pytest.fail('Queryset update failed for {} with exception {}'.format(Factory._meta.model.__name__, ex)) @pytest.mark.parametrize('Factory', guid_factories) def test_update_on_objects_filtered_by_guids(self, Factory): objects = [] for _ in range(0, 5): objects.append(Factory()) new__ids = [o._id for o in objects] try: dtfield = [x.name for x in objects[0]._meta.get_fields() if isinstance(x, DateTimeField)][0] except IndexError: pytest.skip('Thing doesn\'t have a DateTimeField') qs = objects[0]._meta.model.objects.filter(guids___id__in=new__ids) assert len(qs) > 0, 'No results returned' try: qs.update(**{dtfield: timezone.now()}) except Exception as ex: pytest.fail('Queryset update failed for {} with exception {}'.format(Factory._meta.model.__name__, ex)) 
@pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries<|fim▁hole|> pytest.skip('Thing must have contributors') try: with django_assert_num_queries(1): [x._id for x in thing_with_contributors.contributors.all()] except Exception as ex: pytest.fail('Related manager failed for {} with exception {}'.format(Factory._meta.model.__name__, ex)) @pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries def test_count_objects(self, Factory, django_assert_num_queries): objects = [] for _ in range(0, 5): objects.append(Factory()) new_ids = [o.id for o in objects] with django_assert_num_queries(1): qs = objects[0]._meta.model.objects.filter(id__in=new_ids) count = qs.count() assert count == len(objects) @pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries def test_bulk_create_objects(self, Factory, django_assert_num_queries): objects = [] Model = Factory._meta.model kwargs = {} if Factory == PreprintFactory: # Don't try to save preprints on build when neither the subject nor provider have been saved kwargs['finish'] = False for _ in range(0, 5): objects.append(Factory.build(**kwargs)) with django_assert_num_queries(1): things = Model.objects.bulk_create(objects) assert len(things) == len(objects) @pytest.mark.parametrize('Factory', guid_factories) @pytest.mark.django_assert_num_queries def test_bulk_update_objects(self, Factory, django_assert_num_queries): objects = [] ids = range(0, 5) for id in ids: objects.append(Factory()) try: dtfield = [x.name for x in objects[0]._meta.get_fields() if isinstance(x, DateTimeField)][0] except IndexError: pytest.skip('Thing doesn\'t have a DateTimeField') for obj in objects: setattr(obj, dtfield, timezone.now()) with django_assert_num_queries(1): bulk_update(objects)<|fim▁end|>
def test_related_manager(self, Factory, django_assert_num_queries): thing_with_contributors = Factory() if not hasattr(thing_with_contributors, 'contributors'):
<|file_name|>TestMadRelaxed.rs<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */<|fim▁hole|>#include "TestMad.rs" #pragma rs_fp_relaxed<|fim▁end|>
// Don't edit this file! It is auto-generated by frameworks/rs/api/generate.sh.
<|file_name|>site.js<|end_file_name|><|fim▁begin|>/******************************************************/ /* Funciones para manejo de datos del home */ /******************************************************/<|fim▁hole|> var endpoint = 'http://localhost/RelemancoShopsWeb/api/web/v1/'; var rootURL = "/RelemancoShopsWeb/frontend/web"; var comercioMarkers = []; var markersColors = ["blue", "brown", "green", "orange", "paleblue", "yellow", "pink", "purple", "red", "darkgreen"]; var markersName = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "M", "N", "O", "P", "Q", "R", "S", "T", "X"]; var map = null; $( document ).ready(function() { // localizarComercios(); initComerciosMap(); getRutasHistorico(); }); function getRutasHistorico() { var user = $('#perfil-usuario').data('user'); if(user) { $.ajax({ method: "GET", url: endpoint + 'rutas/obtenerhistoricorutas', data: {'id_relevador': user-1}, dataType: "json", contentType: 'application/json' }).done(function(data){ var rutas = jQuery.parseJSON(data); dibujarTablaRutas(rutas, 'tabla-body'); }).fail(function(response){ alert(response.status); }); } } function cambiarRutas() { var id = this.id; var ruta = $('#'+id).data('ruta'); clearComercios(comercioMarkers); comercioMarkers.length = 0; if(ruta && ruta.comercios && ruta.comercios.length > 0) { var comercios = ruta.comercios; geoService.clearRoutes(map); for (var i = 0; i < comercios.length; i++) { addComercio(comercios[i], 200, map); } geoService.createRoutes(comercios, map); } } function dibujarTablaRutas(rutas, idTableBody) { if(rutas.length > 0 ){ var tabla = $('#'+idTableBody); rutas.forEach(function (val) { tabla.append('<tr id="' + val.id + '"><td>' + val.fecha_asignada + '</td><td>' + val.estado.nombre + '</td></tr>'); var tr = $('#' + val.id); tr.on('click', cambiarRutas); tr.attr('id', val.id); tr.data('ruta', val); }); } } /** * Returns a random integer between min (inclusive) and max (inclusive) * Using Math.round() will give you a non-uniform 
distribution! */ function getRandomInt(min, max) { return Math.floor(Math.random() * (max - min + 1)) + min; } function initComerciosMap() { var myLatlng = {lat: -34.8059635, lng: -56.2145634}; map = new google.maps.Map(document.getElementById('mapa-comercios'), { zoom: 15, center: myLatlng }); } function dropComercios(comercios, map) { clearComercios(comercioMarkers); for (var i = 0; i < comercios.length; i++) { addComercio(comercios[i], 2000, map); } } function clearComercios(comercios) { for (var i = 0; i < comercios.length; i++) { comercios[i].setMap(null); } } function markerAnimation(marker){ if (marker.getAnimation() !== null) { marker.setAnimation(null); } else { marker.setAnimation(google.maps.Animation.BOUNCE); } } /* Funcion para genera la lista de comercios en la ventana de informacion del Comercio */ function generarInfoListaProductoComercio(productos){ if(productos != null){ var lista = "<ul>"; for (var i = 0; i < productos.length; i++){ lista += "<li>" + productos[i].nombre + "</li>"; } lista += "</ul>"; return lista; } return "<li>Este comercio no tiene productos asignados.</li>"; } function generarEnlaceComecio(comercio){ return "&nbsp;&nbsp;&nbsp;<a href='" + rootURL + '/comercio/view?id=' + comercio.id + "'><i class='fa fa-eye'>&nbsp;</i>Ver Comercio</a>"; } function generarInfoComercio(comercio){ if(comercio != null){ var info = "<h4>" + comercio.nombre + "</h4>"; info += "<hr/>"; info += generarInfoListaProductoComercio(comercio.productos); info += "<hr/>"; info += generarEnlaceComecio(comercio); return info; } return null; } function addComercio(comercio, timeout, map) { var loc = comercio.localizacion; var position = { lat : Number(loc.latitud), lng: Number(loc.longitud) }; var comercioMark = null; comercioMark = new google.maps.Marker({ position: position, map: map, animation: google.maps.Animation.DROP, title: comercio.nombre, icon: rootURL + "/img/GMapsMarkers/" + markersColors[getRandomInt(0,9)] + "_Marker" + 
markersName[getRandomInt(0,19)] + ".png" }); var infowindow = new google.maps.InfoWindow({ content: generarInfoComercio(comercio) }); comercioMark.addListener('click', function() { infowindow.addListener('closeclick', function(){ comercioMark.setAnimation(null); }); markerAnimation(this); infowindow.open(map, this); }); comercioMarkers.push(comercioMark); } var geoService = { createRoutes: function(markers, map) { var directionsService = new google.maps.DirectionsService(); map._directions = []; function renderDirections(result) { var directionsRenderer = new google.maps.DirectionsRenderer({ suppressMarkers: true }); directionsRenderer.setMap(map); directionsRenderer.setDirections(result); map._directions.push(directionsRenderer); } function requestDirections(start, end) { directionsService.route({ origin: start, destination: end, travelMode: google.maps.DirectionsTravelMode.DRIVING, unitSystem: google.maps.UnitSystem.METRIC }, function (result, status) { renderDirections(result); }); } for (var i = 0; i < markers.length; i++) { if (i < markers.length - 1) { var origen = {lat: Number(markers[i].localizacion.latitud), lng: Number(markers[i].localizacion.longitud)}; var destino = {lat: Number(markers[i + 1].localizacion.latitud), lng: Number(markers[i + 1].localizacion.longitud)}; requestDirections(origen, destino); } } }, clearRoutes: function (Gmap) { if (Gmap._directions && Gmap._directions.length > 0) { var directions = Gmap._directions; directions.forEach(function (val) { val.setMap(null); }); } } };<|fim▁end|>
<|file_name|>kitchen_sink.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import kivymd.snackbar as Snackbar from kivy.app import App from kivy.lang import Builder from kivy.metrics import dp from kivy.properties import ObjectProperty from kivy.uix.image import Image from kivymd.bottomsheet import MDListBottomSheet, MDGridBottomSheet from kivymd.button import MDIconButton from kivymd.label import MDLabel from kivymd.list import ILeftBody, ILeftBodyTouch, IRightBodyTouch from kivymd.navigationdrawer import NavigationDrawer from kivymd.selectioncontrols import MDCheckbox from kivymd.theming import ThemeManager from kivymd.dialog import MDDialog from kivymd.time_picker import MDTimePicker from kivymd.date_picker import MDDatePicker from kivymd.material_resources import DEVICE_TYPE main_widget_kv = ''' #:import Toolbar kivymd.toolbar.Toolbar #:import ThemeManager kivymd.theming.ThemeManager #:import NavigationDrawer kivymd.navigationdrawer.NavigationDrawer #:import MDCheckbox kivymd.selectioncontrols.MDCheckbox #:import MDSwitch kivymd.selectioncontrols.MDSwitch #:import MDList kivymd.list.MDList #:import OneLineListItem kivymd.list.OneLineListItem #:import TwoLineListItem kivymd.list.TwoLineListItem #:import ThreeLineListItem kivymd.list.ThreeLineListItem #:import OneLineAvatarListItem kivymd.list.OneLineAvatarListItem #:import OneLineIconListItem kivymd.list.OneLineIconListItem #:import OneLineAvatarIconListItem kivymd.list.OneLineAvatarIconListItem #:import SingleLineTextField kivymd.textfields.SingleLineTextField #:import MDSpinner kivymd.spinner.MDSpinner #:import MDCard kivymd.card.MDCard #:import MDSeparator kivymd.card.MDSeparator #:import MDDropdownMenu kivymd.menu.MDDropdownMenu #:import get_color_from_hex kivy.utils.get_color_from_hex #:import colors kivymd.color_definitions.colors #:import SmartTile kivymd.grid.SmartTile #:import MDSlider kivymd.slider.MDSlider #:import MDTabbedPanel kivymd.tabs.MDTabbedPanel #:import MDTab kivymd.tabs.MDTab 
#:import MDProgressBar kivymd.progressbar.MDProgressBar #:import MDAccordion kivymd.accordion.MDAccordion #:import MDAccordionItem kivymd.accordion.MDAccordionItem #:import MDThemePicker kivymd.theme_picker.MDThemePicker #:import MDBottomNavigation kivymd.tabs.MDBottomNavigation #:import MDBottomNavigationItem kivymd.tabs.MDBottomNavigationItem BoxLayout: orientation: 'vertical' Toolbar: id: toolbar title: 'KivyMD Kitchen Sink' background_color: app.theme_cls.primary_color background_palette: 'Primary' background_hue: '500' left_action_items: [['menu', lambda x: app.nav_drawer.toggle()]] right_action_items: [['dots-vertical', lambda x: app.nav_drawer.toggle()]] ScreenManager: id: scr_mngr Screen: name: 'bottomsheet' MDRaisedButton: text: "Open List Bottom Sheet" opposite_colors: True size_hint: None, None size: 4 * dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.6} on_release: app.show_example_bottom_sheet() MDRaisedButton: text: "Open grid bottom sheet" opposite_colors: True size_hint: None, None size: 4 * dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.3} on_release: app.show_example_grid_bottom_sheet() Screen: name: 'button' BoxLayout: size_hint: None, None size: '88dp', '48dp' padding: '12dp' pos_hint: {'center_x': 0.75, 'center_y': 0.8} MDLabel: font_style: 'Body1' theme_text_color: 'Primary' text: "Disable buttons" size_hint_x:None width: '56dp' MDCheckbox: id: disable_the_buttons MDIconButton: icon: 'sd' pos_hint: {'center_x': 0.25, 'center_y': 0.8} disabled: disable_the_buttons.active MDFlatButton: text: 'MDFlatButton' pos_hint: {'center_x': 0.5, 'center_y': 0.6} disabled: disable_the_buttons.active MDRaisedButton: text: "MDRaisedButton" elevation_normal: 2 opposite_colors: True pos_hint: {'center_x': 0.5, 'center_y': 0.4} disabled: disable_the_buttons.active MDFloatingActionButton: id: float_act_btn icon: 'plus' opposite_colors: True elevation_normal: 8 pos_hint: {'center_x': 0.5, 'center_y': 0.2} disabled: disable_the_buttons.active 
Screen: name: 'card' MDCard: size_hint: None, None size: dp(320), dp(180) pos_hint: {'center_x': 0.5, 'center_y': 0.7} MDCard: size_hint: None, None size: dp(320), dp(180) pos_hint: {'center_x': 0.5, 'center_y': 0.3} BoxLayout: orientation:'vertical' padding: dp(8) MDLabel: text: 'Title' theme_text_color: 'Secondary' font_style:"Title" size_hint_y: None height: dp(36) MDSeparator: height: dp(1) MDLabel: text: 'Body' theme_text_color: 'Primary' Screen: name: 'slider' BoxLayout: MDSlider: id: hslider min:0 max:100 value: 10 MDSlider: id: vslider orientation:'vertical' min:0 max:100 value: hslider.value Screen: name: 'dialog' MDRaisedButton: text: "Open dialog" size_hint: None, None size: 3 * dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.5} opposite_colors: True on_release: app.show_example_dialog() Screen: name: 'grid' ScrollView: do_scroll_x: False GridLayout: cols: 3 row_default_height: (self.width - self.cols*self.spacing[0])/self.cols row_force_default: True size_hint_y: None height: 8 * dp(100) # /1 * self.row_default_height padding: dp(4), dp(4) spacing: dp(4) SmartTileWithLabel: mipmap: True source: './assets/african-lion-951778_1280.jpg' text: "African Lion" SmartTile: mipmap: True source: './assets/beautiful-931152_1280.jpg' SmartTile: mipmap: True source: './assets/african-lion-951778_1280.jpg' SmartTile: mipmap: True source: './assets/guitar-1139397_1280.jpg' SmartTile: mipmap: True source: './assets/robin-944887_1280.jpg' SmartTile: mipmap: True source: './assets/kitten-1049129_1280.jpg' SmartTile: mipmap: True source: './assets/light-bulb-1042480_1280.jpg' SmartTile: mipmap: True source: './assets/tangerines-1111529_1280.jpg' Screen: name: 'list' ScrollView: do_scroll_x: False MDList: id: ml OneLineListItem: text: "One-line item" TwoLineListItem: text: "Two-line item" secondary_text: "Secondary text here" ThreeLineListItem: text: "Three-line item" secondary_text: "This is a multi-line label where you can fit more text than usual" 
OneLineAvatarListItem: text: "Single-line item with avatar" AvatarSampleWidget: source: './assets/avatar.png' TwoLineAvatarListItem: type: "two-line" text: "Two-line item..." secondary_text: "with avatar" AvatarSampleWidget: source: './assets/avatar.png' ThreeLineAvatarListItem: type: "three-line" text: "Three-line item..." secondary_text: "...with avatar..." + '\\n' + "and third line!" AvatarSampleWidget: source: './assets/avatar.png' OneLineIconListItem: text: "Single-line item with left icon" IconLeftSampleWidget: id: li_icon_1 icon: 'star-circle' TwoLineIconListItem: text: "Two-line item..." secondary_text: "...with left icon" IconLeftSampleWidget: id: li_icon_2 icon: 'comment-text' ThreeLineIconListItem: text: "Three-line item..." secondary_text: "...with left icon..." + '\\n' + "and third line!" IconLeftSampleWidget: id: li_icon_3 icon: 'sd' OneLineAvatarIconListItem: text: "Single-line + avatar&icon" AvatarSampleWidget: source: './assets/avatar.png' IconRightSampleWidget: TwoLineAvatarIconListItem: text: "Two-line item..." secondary_text: "...with avatar&icon" AvatarSampleWidget: source: './assets/avatar.png' IconRightSampleWidget: ThreeLineAvatarIconListItem: text: "Three-line item..." secondary_text: "...with avatar&icon..." + '\\n' + "and third line!" 
AvatarSampleWidget: source: './assets/avatar.png' IconRightSampleWidget: Screen: name: 'menu' MDRaisedButton: size_hint: None, None size: 3 * dp(48), dp(48) text: 'Open menu' opposite_colors: True pos_hint: {'center_x': 0.1, 'center_y': 0.9} on_release: MDDropdownMenu(items=app.menu_items, width_mult=4).open(self) MDRaisedButton: size_hint: None, None size: 3 * dp(48), dp(48) text: 'Open menu' opposite_colors: True pos_hint: {'center_x': 0.1, 'center_y': 0.1} on_release: MDDropdownMenu(items=app.menu_items, width_mult=4).open(self) MDRaisedButton: size_hint: None, None size: 3 * dp(48), dp(48) text: 'Open menu' opposite_colors: True pos_hint: {'center_x': 0.9, 'center_y': 0.1} on_release: MDDropdownMenu(items=app.menu_items, width_mult=4).open(self) MDRaisedButton: size_hint: None, None size: 3 * dp(48), dp(48) text: 'Open menu' opposite_colors: True pos_hint: {'center_x': 0.9, 'center_y': 0.9} on_release: MDDropdownMenu(items=app.menu_items, width_mult=4).open(self) MDRaisedButton: size_hint: None, None size: 3 * dp(48), dp(48) text: 'Open menu' opposite_colors: True pos_hint: {'center_x': 0.5, 'center_y': 0.5} on_release: MDDropdownMenu(items=app.menu_items, width_mult=4).open(self) Screen: name: 'progress' MDCheckbox: id: chkbox size_hint: None, None size: dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.4} active: True MDSpinner: id: spinner size_hint: None, None size: dp(46), dp(46) pos_hint: {'center_x': 0.5, 'center_y': 0.5} active: True if chkbox.active else False Screen: name: 'progressbars' BoxLayout: orientation:'vertical' padding: '8dp' MDSlider: id:progress_slider min:0 max:100 value: 40 MDProgressBar: value: progress_slider.value MDProgressBar: reversed: True value: progress_slider.value BoxLayout: MDProgressBar: orientation:"vertical" reversed: True value: progress_slider.value MDProgressBar: orientation:"vertical" value: progress_slider.value Screen: name: 'selectioncontrols' MDCheckbox: id: grp_chkbox_1 group: 'test' size_hint: None, None 
size: dp(48), dp(48) pos_hint: {'center_x': 0.25, 'center_y': 0.5} MDCheckbox: id: grp_chkbox_2 group: 'test' size_hint: None, None size: dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.5} MDSwitch: size_hint: None, None size: dp(36), dp(48) pos_hint: {'center_x': 0.75, 'center_y': 0.5} active: False Screen: name: 'snackbar' MDRaisedButton: text: "Create simple snackbar" size_hint: None, None size: 4 * dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.75} opposite_colors: True on_release: app.show_example_snackbar('simple') MDRaisedButton: text: "Create snackbar with button" size_hint: None, None size: 4 * dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.5} opposite_colors: True on_release: app.show_example_snackbar('button') MDRaisedButton: text: "Create snackbar with a lot of text" size_hint: None, None size: 5 * dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.25} opposite_colors: True on_release: app.show_example_snackbar('verylong') Screen: name: 'textfields' ScrollView: BoxLayout: orientation: 'vertical' size_hint_y: None height: dp(1000) BoxLayout: size_hint_y: None height: dp(400) padding: dp(48) orientation: 'vertical' spacing: 10 SingleLineTextField: hint_text: "No helper text" SingleLineTextField: hint_text: "Helper text on focus" message: "This will disappear when you click off" message_mode: "on_focus" SingleLineTextField: hint_text: "Persistent helper text" message: "Text is always here" message_mode: "persistent" SingleLineTextField: id: text_field_error hint_text: "Helper text on error (Hit Enter with two characters here)" message: "Two is my least favorite number" message_mode: "on_error" SingleLineTextField:<|fim▁hole|> max_text_length: 10 SingleLineTextField: hint_text: "required = True" required: True message_mode: "on_error" BoxLayout: MDLabel: font_style: 'Body1' theme_text_color: 'Primary' text: "Body1 label" halign: 'center' MDLabel: font_style: 'Body2' theme_text_color: 'Primary' text: "Body2 label" halign: 
'center' BoxLayout: MDLabel: font_style: 'Caption' theme_text_color: 'Primary' text: "Caption label" halign: 'center' MDLabel: font_style: 'Subhead' theme_text_color: 'Primary' text: "Subhead label" halign: 'center' BoxLayout: MDLabel: font_style: 'Title' theme_text_color: 'Primary' text: "Title label" halign: 'center' MDLabel: font_style: 'Headline' theme_text_color: 'Primary' text: "Headline label" halign: 'center' MDLabel: font_style: 'Display1' theme_text_color: 'Primary' text: "Display1 label" halign: 'center' size_hint_y: None height: self.texture_size[1] + dp(4) MDLabel: font_style: 'Display2' theme_text_color: 'Primary' text: "Display2 label" halign: 'center' size_hint_y: None height: self.texture_size[1] + dp(4) MDLabel: font_style: 'Display3' theme_text_color: 'Primary' text: "Display3 label" halign: 'center' size_hint_y: None height: self.texture_size[1] + dp(4) MDLabel: font_style: 'Display4' theme_text_color: 'Primary' text: "Display4 label" halign: 'center' size_hint_y: None height: self.texture_size[1] + dp(4) BoxLayout: MDLabel: font_style: 'Body1' theme_text_color: 'Primary' text: "Primary color" halign: 'center' MDLabel: font_style: 'Body1' theme_text_color: 'Secondary' text: "Secondary color" halign: 'center' BoxLayout: MDLabel: font_style: 'Body1' theme_text_color: 'Hint' text: "Hint color" halign: 'center' MDLabel: font_style: 'Body1' theme_text_color: 'Error' text: "Error color" halign: 'center' MDLabel: font_style: 'Body1' theme_text_color: 'Custom' text_color: (0,1,0,.4) text: "Custom" halign: 'center' Screen: name: 'theming' BoxLayout: orientation: 'vertical' size_hint_y: None height: dp(80) center_y: self.parent.center_y MDRaisedButton: size_hint: None, None size: 3 * dp(48), dp(48) center_x: self.parent.center_x text: 'Change theme' on_release: MDThemePicker().open() opposite_colors: True pos_hint: {'center_x': 0.5} MDLabel: text: "Current: " + app.theme_cls.theme_style + ", " + app.theme_cls.primary_palette theme_text_color: 'Primary' 
pos_hint: {'center_x': 0.5} halign: 'center' Screen: name: 'toolbar' Toolbar: title: "Simple toolbar" pos_hint: {'center_x': 0.5, 'center_y': 0.75} background_color: get_color_from_hex(colors['Teal']['500']) background_palette: 'Teal' background_hue: '500' Toolbar: title: "Toolbar with right buttons" pos_hint: {'center_x': 0.5, 'center_y': 0.5} background_color: get_color_from_hex(colors['Amber']['700']) background_palette: 'Amber' background_hue: '700' right_action_items: [['content-copy', lambda x: None]] Toolbar: title: "Toolbar with left and right buttons" pos_hint: {'center_x': 0.5, 'center_y': 0.25} background_color: get_color_from_hex(colors['DeepPurple']['A400']) background_palette: 'DeepPurple' background_hue: 'A400' left_action_items: [['arrow-left', lambda x: None]] right_action_items: [['lock', lambda x: None], \ ['camera', lambda x: None], \ ['play', lambda x: None]] Screen: name: 'tabs' MDTabbedPanel: id: tab_panel tab_display_mode:'text' MDTab: name: 'music' text: "Music" # Why are these not set!!! 
icon: "playlist-play" MDLabel: font_style: 'Body1' theme_text_color: 'Primary' text: "Here is my music list :)" halign: 'center' MDTab: name: 'movies' text: 'Movies' icon: "movie" MDLabel: font_style: 'Body1' theme_text_color: 'Primary' text: "Show movies here :)" halign: 'center' BoxLayout: size_hint_y:None height: '48dp' padding: '12dp' MDLabel: font_style: 'Body1' theme_text_color: 'Primary' text: "Use icons" size_hint_x:None width: '64dp' MDCheckbox: on_state: tab_panel.tab_display_mode = 'icons' if tab_panel.tab_display_mode=='text' else 'text' Screen: name: 'accordion' BoxLayout: MDAccordion: orientation:'vertical' size_hint_x: None width: '240dp' MDAccordionItem: id: accordion_item title:'Item 1' icon: 'home' ScrollView: MDList: OneLineListItem: text: "Subitem 1" theme_text_color: 'Custom' text_color: [1,1,1,1] OneLineListItem: text: "Subitem 2" theme_text_color: 'Custom' text_color: [1,1,1,1] OneLineListItem: text: "Subitem 3" theme_text_color: 'Custom' text_color: [1,1,1,1] MDAccordionItem: title:'Item 2' icon: 'earth' ScrollView: MDList: OneLineListItem: text: "Subitem 4" theme_text_color: 'Custom' text_color: [1,1,1,1] OneLineListItem: text: "Subitem 5" theme_text_color: 'Custom' text_color: [1,1,1,1] OneLineListItem: text: "Subitem 6" theme_text_color: 'Custom' text_color: [1,1,1,1] MDAccordionItem: title:'Item 3' icon: 'account' ScrollView: MDList: OneLineListItem: text: "Subitem 7" theme_text_color: 'Custom' text_color: [1,1,1,1] OneLineListItem: text: "Subitem 8" theme_text_color: 'Custom' text_color: [1,1,1,1] OneLineListItem: text: "Subitem 9" theme_text_color: 'Custom' text_color: [1,1,1,1] MDLabel: text: 'Content' theme_text_color: 'Primary' Screen: name: 'pickers' BoxLayout: spacing: dp(40) orientation: 'vertical' size_hint_x: None pos_hint: {'center_x': 0.5, 'center_y': 0.5} BoxLayout: orientation: 'vertical' # size_hint: (None, None) MDRaisedButton: text: "Open time picker" size_hint: None, None size: 3 * dp(48), dp(48) pos_hint: {'center_x': 
0.5, 'center_y': 0.5} opposite_colors: True on_release: app.show_example_time_picker() MDLabel: id: time_picker_label theme_text_color: 'Primary' size_hint: None, None size: dp(48)*3, dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.5} BoxLayout: size: dp(48)*3, dp(48) size_hint: (None, None) pos_hint: {'center_x': 0.5, 'center_y': 0.5} MDLabel: theme_text_color: 'Primary' text: "Start on previous time" size_hint: None, None size: dp(130), dp(48) MDCheckbox: id: time_picker_use_previous_time size_hint: None, None size: dp(48), dp(48) BoxLayout: orientation: 'vertical' MDRaisedButton: text: "Open date picker" size_hint: None, None size: 3 * dp(48), dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.5} opposite_colors: True on_release: app.show_example_date_picker() MDLabel: id: date_picker_label theme_text_color: 'Primary' size_hint: None, None size: dp(48)*3, dp(48) pos_hint: {'center_x': 0.5, 'center_y': 0.5} BoxLayout: size: dp(48)*3, dp(48) size_hint: (None, None) pos_hint: {'center_x': 0.5, 'center_y': 0.5} MDLabel: theme_text_color: 'Primary' text: "Start on previous date" size_hint: None, None size: dp(130), dp(48) MDCheckbox: id: date_picker_use_previous_date size_hint: None, None size: dp(48), dp(48) Screen: name: 'bottom_navigation' MDBottomNavigation: id: bottom_navigation_demo MDBottomNavigationItem: name: 'octagon' text: "Warning" icon: "alert-octagon" MDLabel: font_style: 'Body1' theme_text_color: 'Primary' text: "Warning!" 
halign: 'center' MDBottomNavigationItem: name: 'banking' text: "Bank" icon: 'bank' BoxLayout: orientation: 'vertical' size_hint_y: None padding: dp(48) spacing: 10 SingleLineTextField: hint_text: "You can put any widgets here" message: "Hello :)" message_mode: "on_focus" MDBottomNavigationItem: name: 'bottom_navigation_desktop_1' text: "Hello" icon: 'alert' id: bottom_navigation_desktop_1 BoxLayout: orientation: 'vertical' size_hint_y: None padding: dp(48) spacing: 10 SingleLineTextField: hint_text: "Hello again" MDBottomNavigationItem: name: 'bottom_navigation_desktop_2' text: "Food" icon: 'food' id: bottom_navigation_desktop_2 MDLabel: font_style: 'Body1' theme_text_color: 'Primary' text: "Cheese!" halign: 'center' <KitchenSinkNavDrawer> title: "NavigationDrawer" NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Bottom sheets" on_release: app.root.ids.scr_mngr.current = 'bottomsheet' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Buttons" on_release: app.root.ids.scr_mngr.current = 'button' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Cards" on_release: app.root.ids.scr_mngr.current = 'card' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Dialogs" on_release: app.root.ids.scr_mngr.current = 'dialog' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Grid lists" on_release: app.root.ids.scr_mngr.current = 'grid' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Lists" on_release: app.root.ids.scr_mngr.current = 'list' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Menus" on_release: app.root.ids.scr_mngr.current = 'menu' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Progress & activity" on_release: app.root.ids.scr_mngr.current = 'progress' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Progress bars" on_release: app.root.ids.scr_mngr.current = 'progressbars' NavigationDrawerIconButton: icon: 
'checkbox-blank-circle' text: "Selection controls" on_release: app.root.ids.scr_mngr.current = 'selectioncontrols' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Snackbars" on_release: app.root.ids.scr_mngr.current = 'snackbar' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Text fields" on_release: app.root.ids.scr_mngr.current = 'textfields' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Themes" on_release: app.root.ids.scr_mngr.current = 'theming' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Toolbars" on_release: app.root.ids.scr_mngr.current = 'toolbar' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Sliders" on_release: app.root.ids.scr_mngr.current = 'slider' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Tabs" on_release: app.root.ids.scr_mngr.current = 'tabs' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Accordion" on_release: app.root.ids.scr_mngr.current = 'accordion' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Pickers" on_release: app.root.ids.scr_mngr.current = 'pickers' NavigationDrawerIconButton: icon: 'checkbox-blank-circle' text: "Bottom Navigation" on_release: app.root.ids.scr_mngr.current = 'bottom_navigation' ''' class KitchenSinkNavDrawer(NavigationDrawer): pass class KitchenSink(App): theme_cls = ThemeManager() nav_drawer = ObjectProperty() previous_date = ObjectProperty() menu_items = [ {'viewclass': 'MDMenuItem', 'text': 'Example item'}, {'viewclass': 'MDMenuItem', 'text': 'Example item'}, {'viewclass': 'MDMenuItem', 'text': 'Example item'}, {'viewclass': 'MDMenuItem', 'text': 'Example item'}, {'viewclass': 'MDMenuItem', 'text': 'Example item'}, {'viewclass': 'MDMenuItem', 'text': 'Example item'}, {'viewclass': 'MDMenuItem', 'text': 'Example item'}, ] def build(self): main_widget = Builder.load_string(main_widget_kv) # self.theme_cls.theme_style = 'Dark' main_widget.ids.text_field_error.bind( 
on_text_validate=self.set_error_message, on_focus=self.set_error_message) self.nav_drawer = KitchenSinkNavDrawer() self.bottom_navigation_remove_mobile(main_widget) return main_widget def bottom_navigation_remove_mobile(self, widget): # Removes some items from bottom-navigation demo when on mobile if DEVICE_TYPE == 'mobile': widget.ids.bottom_navigation_demo.remove_widget(widget.ids.bottom_navigation_desktop_2) if DEVICE_TYPE == 'mobile' or DEVICE_TYPE == 'tablet': widget.ids.bottom_navigation_demo.remove_widget(widget.ids.bottom_navigation_desktop_1) def show_example_snackbar(self, snack_type): if snack_type == 'simple': Snackbar.make("This is a snackbar!") elif snack_type == 'button': Snackbar.make("This is a snackbar", button_text="with a button!", button_callback=lambda *args: 2) elif snack_type == 'verylong': Snackbar.make("This is a very very very very very very very long " "snackbar!", button_text="Hello world") def show_example_dialog(self): content = MDLabel(font_style='Body1', theme_text_color='Secondary', text="This is a dialog with a title and some text. 
" "That's pretty awesome right!", valign='top') content.bind(size=content.setter('text_size')) self.dialog = MDDialog(title="This is a test dialog", content=content, size_hint=(.8, None), height=dp(200), auto_dismiss=False) self.dialog.add_action_button("Dismiss", action=lambda *x: self.dialog.dismiss()) self.dialog.open() def get_time_picker_data(self, instance, time): self.root.ids.time_picker_label.text = str(time) self.previous_time = time def show_example_time_picker(self): self.time_dialog = MDTimePicker() self.time_dialog.bind(time=self.get_time_picker_data) if self.root.ids.time_picker_use_previous_time.active: try: self.time_dialog.set_time(self.previous_time) except AttributeError: pass self.time_dialog.open() def set_previous_date(self, date_obj): self.previous_date = date_obj self.root.ids.date_picker_label.text = str(date_obj) def show_example_date_picker(self): if self.root.ids.date_picker_use_previous_date.active: pd = self.previous_date try: MDDatePicker(self.set_previous_date, pd.year, pd.month, pd.day).open() except AttributeError: MDDatePicker(self.set_previous_date).open() else: MDDatePicker(self.set_previous_date).open() def show_example_bottom_sheet(self): bs = MDListBottomSheet() bs.add_item("Here's an item with text only", lambda x: x) bs.add_item("Here's an item with an icon", lambda x: x, icon='clipboard-account') bs.add_item("Here's another!", lambda x: x, icon='nfc') bs.open() def show_example_grid_bottom_sheet(self): bs = MDGridBottomSheet() bs.add_item("Facebook", lambda x: x, icon_src='./assets/facebook-box.png') bs.add_item("YouTube", lambda x: x, icon_src='./assets/youtube-play.png') bs.add_item("Twitter", lambda x: x, icon_src='./assets/twitter.png') bs.add_item("Da Cloud", lambda x: x, icon_src='./assets/cloud-upload.png') bs.add_item("Camera", lambda x: x, icon_src='./assets/camera.png') bs.open() def set_error_message(self, *args): if len(self.root.ids.text_field_error.text) == 2: self.root.ids.text_field_error.error = True 
else: self.root.ids.text_field_error.error = False def on_pause(self): return True def on_stop(self): pass class AvatarSampleWidget(ILeftBody, Image): pass class IconLeftSampleWidget(ILeftBodyTouch, MDIconButton): pass class IconRightSampleWidget(IRightBodyTouch, MDCheckbox): pass if __name__ == '__main__': KitchenSink().run()<|fim▁end|>
hint_text: "Max text length = 10"
<|file_name|>side_offsets.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Servo Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A group of side offsets, which correspond to top/left/bottom/right for borders, padding, //! and margins in CSS. use crate::length::Length; use crate::num::Zero; use crate::scale::Scale; use crate::Vector2D; use core::cmp::{Eq, PartialEq}; use core::fmt; use core::hash::Hash; use core::marker::PhantomData; use core::ops::{Add, Div, DivAssign, Mul, MulAssign, Neg}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; /// A group of 2D side offsets, which correspond to top/right/bottom/left for borders, padding, /// and margins in CSS, optionally tagged with a unit. 
#[repr(C)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(
    feature = "serde",
    serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'de>"))
)]
pub struct SideOffsets2D<T, U> {
    pub top: T,
    pub right: T,
    pub bottom: T,
    pub left: T,
    #[doc(hidden)]
    pub _unit: PhantomData<U>,
}

#[cfg(feature = "arbitrary")]
impl<'a, T, U> arbitrary::Arbitrary<'a> for SideOffsets2D<T, U>
where
    T: arbitrary::Arbitrary<'a>,
{
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let (top, right, bottom, left) = arbitrary::Arbitrary::arbitrary(u)?;
        Ok(SideOffsets2D {
            top,
            right,
            bottom,
            left,
            _unit: PhantomData,
        })
    }
}

// The trait impls below are written out by hand instead of derived so that
// they only place bounds on `T`; `#[derive(...)]` would also require the
// phantom unit tag `U` to implement each trait.
impl<T: Copy, U> Copy for SideOffsets2D<T, U> {}

impl<T: Clone, U> Clone for SideOffsets2D<T, U> {
    fn clone(&self) -> Self {
        SideOffsets2D {
            top: self.top.clone(),
            right: self.right.clone(),
            bottom: self.bottom.clone(),
            left: self.left.clone(),
            _unit: PhantomData,
        }
    }
}

impl<T, U> Eq for SideOffsets2D<T, U> where T: Eq {}

impl<T, U> PartialEq for SideOffsets2D<T, U>
where
    T: PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        self.top == other.top
            && self.right == other.right
            && self.bottom == other.bottom
            && self.left == other.left
    }
}

impl<T, U> Hash for SideOffsets2D<T, U>
where
    T: Hash,
{
    fn hash<H: core::hash::Hasher>(&self, h: &mut H) {
        self.top.hash(h);
        self.right.hash(h);
        self.bottom.hash(h);
        self.left.hash(h);
    }
}

impl<T: fmt::Debug, U> fmt::Debug for SideOffsets2D<T, U> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "({:?},{:?},{:?},{:?})",
            self.top, self.right, self.bottom, self.left
        )
    }
}

impl<T: Default, U> Default for SideOffsets2D<T, U> {
    fn default() -> Self {
        SideOffsets2D {
            top: Default::default(),
            right: Default::default(),
            bottom: Default::default(),
            left: Default::default(),
            _unit: PhantomData,
        }
    }
}

impl<T, U> SideOffsets2D<T, U> {
    /// Constructor taking a scalar for each side.
    ///
    /// Sides are specified in top-right-bottom-left order following
    /// CSS's convention.
    pub const fn new(top: T, right: T, bottom: T, left: T) -> Self {
        SideOffsets2D {
            top,
            right,
            bottom,
            left,
            _unit: PhantomData,
        }
    }

    /// Constructor taking a typed Length for each side.
    ///
    /// Sides are specified in top-right-bottom-left order following
    /// CSS's convention.
    pub fn from_lengths(
        top: Length<T, U>,
        right: Length<T, U>,
        bottom: Length<T, U>,
        left: Length<T, U>,
    ) -> Self {
        SideOffsets2D::new(top.0, right.0, bottom.0, left.0)
    }

    /// Construct side offsets from min and a max vector offsets.
    ///
    /// The outer rect of the resulting side offsets is equivalent to translating
    /// a rectangle's upper-left corner with the min vector and translating the
    /// bottom-right corner with the max vector.
    pub fn from_vectors_outer(min: Vector2D<T, U>, max: Vector2D<T, U>) -> Self
    where
        T: Neg<Output = T>,
    {
        // min points up/left, so its components are negated to become
        // positive outward offsets.
        SideOffsets2D {
            left: -min.x,
            top: -min.y,
            right: max.x,
            bottom: max.y,
            _unit: PhantomData,
        }
    }

    /// Construct side offsets from min and a max vector offsets.
    ///
    /// The inner rect of the resulting side offsets is equivalent to translating
    /// a rectangle's upper-left corner with the min vector and translating the
    /// bottom-right corner with the max vector.
    pub fn from_vectors_inner(min: Vector2D<T, U>, max: Vector2D<T, U>) -> Self
    where
        T: Neg<Output = T>,
    {
        // Mirror image of `from_vectors_outer`: max shrinks the rect inward,
        // so its components are negated instead.
        SideOffsets2D {
            left: min.x,
            top: min.y,
            right: -max.x,
            bottom: -max.y,
            _unit: PhantomData,
        }
    }

    /// Constructor, setting all sides to zero.
    pub fn zero() -> Self
    where
        T: Zero,
    {
        SideOffsets2D::new(Zero::zero(), Zero::zero(), Zero::zero(), Zero::zero())
    }

    /// Returns `true` if all side offsets are zero.
    pub fn is_zero(&self) -> bool
    where
        T: Zero + PartialEq,
    {
        let zero = T::zero();
        self.top == zero && self.right == zero && self.bottom == zero && self.left == zero
    }

    /// Constructor setting the same value to all sides, taking a scalar value directly.
pub fn new_all_same(all: T) -> Self where T : Copy { SideOffsets2D::new(all, all, all, all) } /// Constructor setting the same value to all sides, taking a typed Length. pub fn from_length_all_same(all: Length<T, U>) -> Self where T : Copy { SideOffsets2D::new_all_same(all.0) } pub fn horizontal(&self) -> T where T: Copy + Add<T, Output = T> { self.left + self.right } pub fn vertical(&self) -> T where T: Copy + Add<T, Output = T> { self.top + self.bottom } } impl<T, U> Add for SideOffsets2D<T, U> where T: Add<T, Output = T>, { type Output = Self; fn add(self, other: Self) -> Self { SideOffsets2D::new( self.top + other.top, self.right + other.right, self.bottom + other.bottom, self.left + other.left, ) } } impl<T: Copy + Mul, U> Mul<T> for SideOffsets2D<T, U> { type Output = SideOffsets2D<T::Output, U>; #[inline] fn mul(self, scale: T) -> Self::Output { SideOffsets2D::new( self.top * scale, self.right * scale, self.bottom * scale, self.left * scale, ) } } impl<T: Copy + MulAssign, U> MulAssign<T> for SideOffsets2D<T, U> { #[inline] fn mul_assign(&mut self, other: T) { self.top *= other; self.right *= other; self.bottom *= other; self.left *= other; } } impl<T: Copy + Mul, U1, U2> Mul<Scale<T, U1, U2>> for SideOffsets2D<T, U1> { type Output = SideOffsets2D<T::Output, U2>; #[inline] fn mul(self, scale: Scale<T, U1, U2>) -> Self::Output { SideOffsets2D::new( self.top * scale.0, self.right * scale.0, self.bottom * scale.0, self.left * scale.0, ) } } impl<T: Copy + MulAssign, U> MulAssign<Scale<T, U, U>> for SideOffsets2D<T, U> { #[inline] fn mul_assign(&mut self, other: Scale<T, U, U>) { *self *= other.0; } } impl<T: Copy + Div, U> Div<T> for SideOffsets2D<T, U> { type Output = SideOffsets2D<T::Output, U>; #[inline] fn div(self, scale: T) -> Self::Output { SideOffsets2D::new( self.top / scale, self.right / scale, self.bottom / scale, self.left / scale, ) } } impl<T: Copy + DivAssign, U> DivAssign<T> for SideOffsets2D<T, U> { #[inline] fn div_assign(&mut self, other: T) 
{ self.top /= other; self.right /= other; self.bottom /= other; self.left /= other; } } impl<T: Copy + Div, U1, U2> Div<Scale<T, U1, U2>> for SideOffsets2D<T, U2> { type Output = SideOffsets2D<T::Output, U1>; #[inline] fn div(self, scale: Scale<T, U1, U2>) -> Self::Output { SideOffsets2D::new( self.top / scale.0, self.right / scale.0, self.bottom / scale.0, self.left / scale.0, ) } } impl<T: Copy + DivAssign, U> DivAssign<Scale<T, U, U>> for SideOffsets2D<T, U> { fn div_assign(&mut self, other: Scale<T, U, U>) { *self /= other.0; } } #[test] fn from_vectors() { use crate::{point2, vec2}; type Box2D = crate::default::Box2D<i32>; let b = Box2D { min: point2(10, 10), max: point2(20, 20), }; let outer = b.outer_box(SideOffsets2D::from_vectors_outer(vec2(-1, -2), vec2(3, 4))); let inner = b.inner_box(SideOffsets2D::from_vectors_inner(vec2(1, 2), vec2(-3, -4))); assert_eq!( outer, Box2D { min: point2(9, 8), max: point2(23, 24) } ); assert_eq!( inner, Box2D { min: point2(11, 12), max: point2(17, 16) } ); } #[test] fn test_is_zero() { let s1: SideOffsets2D<f32, ()> = SideOffsets2D::new_all_same(0.0); assert!(s1.is_zero()); let s2: SideOffsets2D<f32, ()> = SideOffsets2D::new(1.0, 2.0, 3.0, 4.0); assert!(!s2.is_zero()); } #[cfg(test)] mod ops { use crate::Scale; pub enum Mm {} pub enum Cm {} type SideOffsets2D<T> = crate::default::SideOffsets2D<T>; type SideOffsets2DMm<T> = crate::SideOffsets2D<T, Mm>; type SideOffsets2DCm<T> = crate::SideOffsets2D<T, Cm>; #[test] fn test_mul_scalar() { let s = SideOffsets2D::new(1.0, 2.0, 3.0, 4.0); let result = s * 3.0; assert_eq!(result, SideOffsets2D::new(3.0, 6.0, 9.0, 12.0)); } #[test] fn test_mul_assign_scalar() { let mut s = SideOffsets2D::new(1.0, 2.0, 3.0, 4.0); s *= 2.0; assert_eq!(s, SideOffsets2D::new(2.0, 4.0, 6.0, 8.0)); } #[test] fn test_mul_scale() { let s = SideOffsets2DMm::new(0.0, 1.0, 3.0, 2.0); let cm_per_mm: Scale<f32, Mm, Cm> = Scale::new(0.1); let result = s * cm_per_mm; assert_eq!(result, SideOffsets2DCm::new(0.0, 
0.1, 0.3, 0.2)); } #[test] fn test_mul_assign_scale() { let mut s = SideOffsets2DMm::new(2.0, 4.0, 6.0, 8.0); let scale: Scale<f32, Mm, Mm> = Scale::new(0.1); s *= scale; assert_eq!(s, SideOffsets2DMm::new(0.2, 0.4, 0.6, 0.8)); } #[test] fn test_div_scalar() { let s = SideOffsets2D::new(10.0, 20.0, 30.0, 40.0); let result = s / 10.0; assert_eq!(result, SideOffsets2D::new(1.0, 2.0, 3.0, 4.0)); } #[test] fn test_div_assign_scalar() { let mut s = SideOffsets2D::new(10.0, 20.0, 30.0, 40.0); s /= 10.0; assert_eq!(s, SideOffsets2D::new(1.0, 2.0, 3.0, 4.0)); } #[test] fn test_div_scale() { let s = SideOffsets2DCm::new(0.1, 0.2, 0.3, 0.4); let cm_per_mm: Scale<f32, Mm, Cm> = Scale::new(0.1); let result = s / cm_per_mm; assert_eq!(result, SideOffsets2DMm::new(1.0, 2.0, 3.0, 4.0)); } #[test] fn test_div_assign_scale() { let mut s = SideOffsets2DMm::new(0.1, 0.2, 0.3, 0.4); let scale: Scale<f32, Mm, Mm> = Scale::new(0.1); s /= scale; assert_eq!(s, SideOffsets2DMm::new(1.0, 2.0, 3.0, 4.0)); } }<|fim▁end|>