prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
<|file_name|>filemap.py<|end_file_name|><|fim▁begin|># Copyright 2012 Lee Verberne <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import os, os.path
import shutil as sh
import sys

from fabric.api import abort, local, prompt, warn

# Fabric 1.0 changed the scope of cd() to only affect remote calls.
# This bit of kludgery maintains compatibility of this file with fabric 0.9,
# but it is only possible because no remote calls are made in this file
try:
    from fabric.api import lcd as cd
except ImportError:
    from fabric.api import cd

from ubik import builder, packager

# filemap copies files directly from source to root, there is no build step
defenv = builder.BuildEnv('_root','_root','.')
file_map, file_map_table = None, None

def _install_file_map(fmap, installdir):
    for src, dst in fmap:
        _install(src, os.path.join(installdir,dst))

def _install(src, dst):
    if src and os.path.isdir(src):
        sh.copytree(src, dst)
    else:
        if not os.path.exists(os.path.dirname(dst)):
            os.makedirs(os.path.dirname(dst))
        if src:
            sh.copy(src, dst)

def build(pkgtype='deb', env=defenv):
    'Builds this package into a directory tree'
    if file_map:
        _install_file_map(file_map, env.rootdir)
    elif file_map_table:
        _install_file_map(file_map_table[pkgtype], env.rootdir)
    else:
        abort("You must register a filemap with this module using register().")

def clean(env=defenv):
    'Remove build directory and packages'
    with cd(env.srcdir):
        local('rm -rf _* *.deb *.rpm', capture=False)
        local('find . -name \*.pyc -print -exec rm \{\} \;', capture=False)

def deb(version=None):
    'Build a debian package'<|fim▁hole|>
    package(version, 'deb')

def debiandir(version='0.0', env=defenv):
    "Generate DEBIAN dir in rootdir, but don't build package"
    if not env.exists('builddir'):
        build('deb', env)
    packager.DebPackage('package.ini', env).debiandir(version)

def filelist(pkgtype='deb', env=defenv):
    '''Outputs default filelist as json (see details)

    Generates and prints to stdout a filelist json that can be modified and
    used with package.ini's "filelist" option to override the default.
    Useful for setting file modes in RPMs'''
    if not env.exists('builddir'):
        build(pkgtype, env)
    packager.Package('package.ini', env).filelist()

def package(version=None, pkgtype='deb', env=defenv):
    'Creates deployable packages'
    if not version:
        version = prompt("What version did you want packaged there, hotshot?")
    if not env.exists('builddir'):
        warn('Implicitly invoking build')
        build(pkgtype, env)
    pkg = packager.Package('package.ini', env, pkgtype)
    pkg.build(version)

def register(filemap_or_table):
    'Register a filemap for use with this module'
    global file_map, file_map_table
    if isinstance(filemap_or_table, list):
        file_map = filemap_or_table
    elif isinstance(filemap_or_table, dict):
        file_map_table = filemap_or_table
    else:
        abort("I don't even know what you're talking about.")

def rpm(version=None):
    'Build a Red Hat package'
    package(version, 'rpm')

def rpmspec(version='0.0', env=defenv):
    'Output the generated RPM spec file'
    if not env.exists('builddir'):
        build('rpm', env)
    packager.RpmPackage('package.ini', env).rpmspec(sys.stdout, version)<|fim▁end|>
<|file_name|>SABRCMSSpreadNoExtrapolationYCNSFunction.java<|end_file_name|><|fim▁begin|>/** * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.model.sabrcube; import static com.opengamma.engine.value.ValueRequirementNames.SABR_SURFACES; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.analytics.financial.interestrate.PresentValueCurveSensitivitySABRCalculator; import com.opengamma.analytics.financial.interestrate.PresentValueNodeSensitivityCalculator; import com.opengamma.analytics.financial.interestrate.YieldCurveBundle; import com.opengamma.analytics.financial.model.option.definition.SABRInterestRateCorrelationParameters; import com.opengamma.analytics.financial.model.option.definition.SABRInterestRateDataBundle; import com.opengamma.analytics.math.function.DoubleFunction1D; import com.opengamma.analytics.math.surface.InterpolatedDoublesSurface; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.target.ComputationTargetType; import com.opengamma.engine.value.SurfaceAndCubePropertyNames; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.financial.analytics.model.sabr.SABRDiscountingFunction; import com.opengamma.financial.analytics.model.volatility.SmileFittingPropertyNamesAndValues; import com.opengamma.financial.analytics.volatility.fittedresults.SABRFittedSurfaces; import com.opengamma.financial.security.FinancialSecurityTypes; import com.opengamma.financial.security.FinancialSecurityUtils; import com.opengamma.util.money.Currency; /** * @deprecated Use descendants of {@link SABRDiscountingFunction} */ @Deprecated public class SABRCMSSpreadNoExtrapolationYCNSFunction extends SABRYCNSFunction { private static final PresentValueNodeSensitivityCalculator NSC = PresentValueNodeSensitivityCalculator.using(PresentValueCurveSensitivitySABRCalculator.getInstance()); @Override public ComputationTargetType getTargetType() { return FinancialSecurityTypes.CAP_FLOOR_CMS_SPREAD_SECURITY; } @Override protected SABRInterestRateDataBundle getModelParameters(final ComputationTarget target, final FunctionInputs inputs, final Currency currency, final YieldCurveBundle yieldCurves, final ValueRequirement desiredValue) { final Object surfacesObject = inputs.getValue(SABR_SURFACES); if (surfacesObject == null) { throw new OpenGammaRuntimeException("Could not get SABR parameter surfaces"); } final SABRFittedSurfaces surfaces = (SABRFittedSurfaces) surfacesObject; final InterpolatedDoublesSurface alphaSurface = surfaces.getAlphaSurface(); final InterpolatedDoublesSurface betaSurface = surfaces.getBetaSurface(); final InterpolatedDoublesSurface nuSurface = surfaces.getNuSurface(); final InterpolatedDoublesSurface rhoSurface = surfaces.getRhoSurface(); final DoubleFunction1D correlationFunction = getCorrelationFunction(); final SABRInterestRateCorrelationParameters modelParameters = new SABRInterestRateCorrelationParameters(alphaSurface, betaSurface, rhoSurface, nuSurface, correlationFunction); return new SABRInterestRateDataBundle(modelParameters, yieldCurves); } @Override protected ValueProperties.Builder createValueProperties(final Currency currency) { return createValueProperties() .with(ValuePropertyNames.CURRENCY, currency.getCode()) 
.with(ValuePropertyNames.CURVE_CURRENCY, currency.getCode()) .withAny(ValuePropertyNames.CURVE_CALCULATION_CONFIG) .withAny(ValuePropertyNames.CURVE) .withAny(SurfaceAndCubePropertyNames.PROPERTY_CUBE_DEFINITION) .withAny(SurfaceAndCubePropertyNames.PROPERTY_CUBE_SPECIFICATION) .withAny(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_DEFINITION) .withAny(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_SPECIFICATION) .withAny(SmileFittingPropertyNamesAndValues.PROPERTY_FITTING_METHOD) .with(SmileFittingPropertyNamesAndValues.PROPERTY_VOLATILITY_MODEL, SmileFittingPropertyNamesAndValues.SABR) .with(ValuePropertyNames.CALCULATION_METHOD, SABRFunction.SABR_NO_EXTRAPOLATION); } @Override protected ValueProperties.Builder createValueProperties(final ComputationTarget target, final ValueRequirement desiredValue) { final String cubeDefinitionName = desiredValue.getConstraint(SurfaceAndCubePropertyNames.PROPERTY_CUBE_DEFINITION); final String cubeSpecificationName = desiredValue.getConstraint(SurfaceAndCubePropertyNames.PROPERTY_CUBE_SPECIFICATION); final String surfaceDefinitionName = desiredValue.getConstraint(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_DEFINITION); final String surfaceSpecificationName = desiredValue.getConstraint(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_SPECIFICATION); final String currency = FinancialSecurityUtils.getCurrency(target.getSecurity()).getCode(); final String curveCalculationConfig = desiredValue.getConstraint(ValuePropertyNames.CURVE_CALCULATION_CONFIG); final String fittingMethod = desiredValue.getConstraint(SmileFittingPropertyNamesAndValues.PROPERTY_FITTING_METHOD); final String curveName = desiredValue.getConstraint(ValuePropertyNames.CURVE); return createValueProperties() .with(ValuePropertyNames.CURRENCY, currency) .with(ValuePropertyNames.CURVE_CURRENCY, currency) .with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfig) .with(ValuePropertyNames.CURVE, curveName) .with(SurfaceAndCubePropertyNames.PROPERTY_CUBE_DEFINITION, cubeDefinitionName) .with(SurfaceAndCubePropertyNames.PROPERTY_CUBE_SPECIFICATION, cubeSpecificationName) .with(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_DEFINITION, surfaceDefinitionName) .with(SurfaceAndCubePropertyNames.PROPERTY_SURFACE_SPECIFICATION, surfaceSpecificationName) .with(SmileFittingPropertyNamesAndValues.PROPERTY_FITTING_METHOD, fittingMethod) .with(SmileFittingPropertyNamesAndValues.PROPERTY_VOLATILITY_MODEL, SmileFittingPropertyNamesAndValues.SABR) .with(ValuePropertyNames.CALCULATION_METHOD, SABRFunction.SABR_NO_EXTRAPOLATION); } <|fim▁hole|> } private static DoubleFunction1D getCorrelationFunction() { return new DoubleFunction1D() { @Override public Double evaluate(final Double x) { return 0.8; } }; } }<|fim▁end|>
@Override protected PresentValueNodeSensitivityCalculator getNodeSensitivityCalculator(final ValueRequirement desiredValue) { return NSC;
<|file_name|>build.js<|end_file_name|><|fim▁begin|>process.env.NODE_ENV = 'production'
var ora = require('ora')
var rm = require('rimraf')
var path = require('path')
var chalk = require('chalk')
var webpack = require('webpack')
var config = require('../config')
var webpackConfig = require('./webpack.prod.conf')
var spinner = ora('building for production...')
spinner.start()
rm(path.join(config.build.assetsRoot, config.build.assetsSubDirectory), err => {
  if (err) throw err
  webpack(webpackConfig, function (err, stats) {
    spinner.stop()
    if (err) throw err
    process.stdout.write(stats.toString({
      colors: true,
      modules: false,
      children: false,
      chunks: false,
      chunkModules: false<|fim▁hole|>
    console.log(chalk.cyan(' Build complete.\n'))
    console.log(chalk.yellow(
      ' Tip: built files are meant to be served over an HTTP server.\n' +
      ' Opening index.html over file:// won\'t work.\n'
    ))
  })
})<|fim▁end|>
}) + '\n\n')
<|file_name|>model_json.go<|end_file_name|><|fim▁begin|>// generated by jwg -output misc/fixture/j/model_json.go misc/fixture/j; DO NOT EDIT package j import ( "encoding/json" ) // FooJSON is jsonized struct for Foo. type FooJSON struct { Tmp *Temp `json:"tmp,omitempty"` Bar `json:",omitempty"` *Buzz `json:",omitempty"` HogeJSON `json:",omitempty"` *FugaJSON `json:",omitempty"` } // FooJSONList is synonym about []*FooJSON. type FooJSONList []*FooJSON // FooPropertyEncoder is property encoder for [1]sJSON. type FooPropertyEncoder func(src *Foo, dest *FooJSON) error // FooPropertyDecoder is property decoder for [1]sJSON. type FooPropertyDecoder func(src *FooJSON, dest *Foo) error // FooPropertyInfo stores property information. type FooPropertyInfo struct { fieldName string jsonName string Encoder FooPropertyEncoder Decoder FooPropertyDecoder } // FieldName returns struct field name of property. func (info *FooPropertyInfo) FieldName() string { return info.fieldName } // JSONName returns json field name of property. func (info *FooPropertyInfo) JSONName() string { return info.jsonName } // FooJSONBuilder convert between Foo to FooJSON mutually. type FooJSONBuilder struct { _properties map[string]*FooPropertyInfo _jsonPropertyMap map[string]*FooPropertyInfo _structPropertyMap map[string]*FooPropertyInfo Tmp *FooPropertyInfo Bar *FooPropertyInfo Buzz *FooPropertyInfo Hoge *FooPropertyInfo Fuga *FooPropertyInfo } // NewFooJSONBuilder make new FooJSONBuilder. func NewFooJSONBuilder() *FooJSONBuilder { jb := &FooJSONBuilder{ _properties: map[string]*FooPropertyInfo{}, _jsonPropertyMap: map[string]*FooPropertyInfo{}, _structPropertyMap: map[string]*FooPropertyInfo{}, Tmp: &FooPropertyInfo{ fieldName: "Tmp", jsonName: "tmp", Encoder: func(src *Foo, dest *FooJSON) error { if src == nil { return nil } dest.Tmp = src.Tmp return nil }, Decoder: func(src *FooJSON, dest *Foo) error { if src == nil { return nil } dest.Tmp = src.Tmp return nil }, }, Bar: &FooPropertyInfo{ fieldName: "Bar", jsonName: "", Encoder: func(src *Foo, dest *FooJSON) error { if src == nil { return nil } dest.Bar = src.Bar return nil }, Decoder: func(src *FooJSON, dest *Foo) error { if src == nil { return nil } dest.Bar = src.Bar return nil }, }, Buzz: &FooPropertyInfo{ fieldName: "Buzz", jsonName: "", Encoder: func(src *Foo, dest *FooJSON) error { if src == nil { return nil } dest.Buzz = src.Buzz return nil }, Decoder: func(src *FooJSON, dest *Foo) error { if src == nil { return nil } dest.Buzz = src.Buzz return nil }, }, Hoge: &FooPropertyInfo{ fieldName: "Hoge", jsonName: "", Encoder: func(src *Foo, dest *FooJSON) error { if src == nil { return nil } d, err := NewHogeJSONBuilder().AddAll().Convert(&src.Hoge) if err != nil { return err } dest.HogeJSON = *d return nil }, Decoder: func(src *FooJSON, dest *Foo) error { if src == nil { return nil } d, err := src.HogeJSON.Convert() if err != nil { return err } dest.Hoge = *d return nil }, }, Fuga: &FooPropertyInfo{ fieldName: "Fuga", jsonName: "", Encoder: func(src *Foo, dest *FooJSON) error { if src == nil { return nil } else if src.Fuga == nil { return nil } d, err := NewFugaJSONBuilder().AddAll().Convert(src.Fuga) if err != nil { return err } dest.FugaJSON = d return nil }, Decoder: func(src *FooJSON, dest *Foo) error { if src == nil { return nil } else if src.FugaJSON == nil { return nil } d, err := src.FugaJSON.Convert() if err != nil { return err } dest.Fuga = d return nil }, }, } jb._structPropertyMap["Tmp"] = jb.Tmp jb._jsonPropertyMap["tmp"] = jb.Tmp 
jb._structPropertyMap["Bar"] = jb.Bar jb._jsonPropertyMap[""] = jb.Bar jb._structPropertyMap["Buzz"] = jb.Buzz jb._jsonPropertyMap[""] = jb.Buzz jb._structPropertyMap["Hoge"] = jb.Hoge jb._jsonPropertyMap[""] = jb.Hoge jb._structPropertyMap["Fuga"] = jb.Fuga jb._jsonPropertyMap[""] = jb.Fuga return jb } // Properties returns all properties on FooJSONBuilder. func (b *FooJSONBuilder) Properties() []*FooPropertyInfo { return []*FooPropertyInfo{ b.Tmp, b.Bar, b.Buzz, b.Hoge, b.Fuga, } } // AddAll adds all property to FooJSONBuilder. func (b *FooJSONBuilder) AddAll() *FooJSONBuilder { b._properties["Tmp"] = b.Tmp b._properties["Bar"] = b.Bar b._properties["Buzz"] = b.Buzz b._properties["Hoge"] = b.Hoge b._properties["Fuga"] = b.Fuga return b } // Add specified property to FooJSONBuilder. func (b *FooJSONBuilder) Add(info *FooPropertyInfo) *FooJSONBuilder { b._properties[info.fieldName] = info return b } // AddByJSONNames add properties to FooJSONBuilder by JSON property name. if name is not in the builder, it will ignore. func (b *FooJSONBuilder) AddByJSONNames(names ...string) *FooJSONBuilder { for _, name := range names { info := b._jsonPropertyMap[name] if info == nil { continue } b._properties[info.fieldName] = info } return b } // AddByNames add properties to FooJSONBuilder by struct property name. if name is not in the builder, it will ignore. func (b *FooJSONBuilder) AddByNames(names ...string) *FooJSONBuilder { for _, name := range names { info := b._structPropertyMap[name] if info == nil { continue } b._properties[info.fieldName] = info } return b } // Remove specified property to FooJSONBuilder. func (b *FooJSONBuilder) Remove(info *FooPropertyInfo) *FooJSONBuilder { delete(b._properties, info.fieldName) return b } // RemoveByJSONNames remove properties to FooJSONBuilder by JSON property name. if name is not in the builder, it will ignore. func (b *FooJSONBuilder) RemoveByJSONNames(names ...string) *FooJSONBuilder { for _, name := range names { info := b._jsonPropertyMap[name] if info == nil { continue } delete(b._properties, info.fieldName) } return b } // RemoveByNames remove properties to FooJSONBuilder by struct property name. if name is not in the builder, it will ignore. func (b *FooJSONBuilder) RemoveByNames(names ...string) *FooJSONBuilder { for _, name := range names { info := b._structPropertyMap[name] if info == nil { continue } delete(b._properties, info.fieldName) } return b } // Convert specified non-JSON object to JSON object. func (b *FooJSONBuilder) Convert(orig *Foo) (*FooJSON, error) { if orig == nil { return nil, nil } ret := &FooJSON{} for _, info := range b._properties { if err := info.Encoder(orig, ret); err != nil { return nil, err } } return ret, nil } // ConvertList specified non-JSON slice to JSONList. func (b *FooJSONBuilder) ConvertList(orig []*Foo) (FooJSONList, error) { if orig == nil { return nil, nil } list := make(FooJSONList, len(orig)) for idx, or := range orig { json, err := b.Convert(or) if err != nil { return nil, err } list[idx] = json } return list, nil } // Convert specified JSON object to non-JSON object. func (orig *FooJSON) Convert() (*Foo, error) { ret := &Foo{} b := NewFooJSONBuilder().AddAll() for _, info := range b._properties { if err := info.Decoder(orig, ret); err != nil { return nil, err } } return ret, nil } // Convert specified JSONList to non-JSON slice. 
func (jsonList FooJSONList) Convert() ([]*Foo, error) { orig := ([]*FooJSON)(jsonList) list := make([]*Foo, len(orig)) for idx, or := range orig { obj, err := or.Convert() if err != nil { return nil, err } list[idx] = obj } return list, nil } // Marshal non-JSON object to JSON string. func (b *FooJSONBuilder) Marshal(orig *Foo) ([]byte, error) { ret, err := b.Convert(orig) if err != nil { return nil, err } return json.Marshal(ret) } // HogeJSON is jsonized struct for Hoge. type HogeJSON struct { Hoge1 string `json:"hoge1,omitempty"` } // HogeJSONList is synonym about []*HogeJSON. type HogeJSONList []*HogeJSON // HogePropertyEncoder is property encoder for [1]sJSON. type HogePropertyEncoder func(src *Hoge, dest *HogeJSON) error // HogePropertyDecoder is property decoder for [1]sJSON. type HogePropertyDecoder func(src *HogeJSON, dest *Hoge) error // HogePropertyInfo stores property information. type HogePropertyInfo struct { fieldName string jsonName string Encoder HogePropertyEncoder Decoder HogePropertyDecoder } // FieldName returns struct field name of property. func (info *HogePropertyInfo) FieldName() string { return info.fieldName } // JSONName returns json field name of property. func (info *HogePropertyInfo) JSONName() string { return info.jsonName } // HogeJSONBuilder convert between Hoge to HogeJSON mutually. type HogeJSONBuilder struct { _properties map[string]*HogePropertyInfo _jsonPropertyMap map[string]*HogePropertyInfo _structPropertyMap map[string]*HogePropertyInfo Hoge1 *HogePropertyInfo } // NewHogeJSONBuilder make new HogeJSONBuilder. func NewHogeJSONBuilder() *HogeJSONBuilder { jb := &HogeJSONBuilder{ _properties: map[string]*HogePropertyInfo{}, _jsonPropertyMap: map[string]*HogePropertyInfo{}, _structPropertyMap: map[string]*HogePropertyInfo{}, Hoge1: &HogePropertyInfo{ fieldName: "Hoge1", jsonName: "hoge1", Encoder: func(src *Hoge, dest *HogeJSON) error { if src == nil { return nil } dest.Hoge1 = src.Hoge1 return nil }, Decoder: func(src *HogeJSON, dest *Hoge) error { if src == nil { return nil } dest.Hoge1 = src.Hoge1 return nil }, }, } jb._structPropertyMap["Hoge1"] = jb.Hoge1 jb._jsonPropertyMap["hoge1"] = jb.Hoge1 return jb } // Properties returns all properties on HogeJSONBuilder. func (b *HogeJSONBuilder) Properties() []*HogePropertyInfo { return []*HogePropertyInfo{ b.Hoge1, } } // AddAll adds all property to HogeJSONBuilder. func (b *HogeJSONBuilder) AddAll() *HogeJSONBuilder { b._properties["Hoge1"] = b.Hoge1 return b } // Add specified property to HogeJSONBuilder. func (b *HogeJSONBuilder) Add(info *HogePropertyInfo) *HogeJSONBuilder { b._properties[info.fieldName] = info return b } // AddByJSONNames add properties to HogeJSONBuilder by JSON property name. if name is not in the builder, it will ignore. func (b *HogeJSONBuilder) AddByJSONNames(names ...string) *HogeJSONBuilder { for _, name := range names { info := b._jsonPropertyMap[name] if info == nil { continue } b._properties[info.fieldName] = info } return b } // AddByNames add properties to HogeJSONBuilder by struct property name. if name is not in the builder, it will ignore. func (b *HogeJSONBuilder) AddByNames(names ...string) *HogeJSONBuilder { for _, name := range names { info := b._structPropertyMap[name] if info == nil { continue } b._properties[info.fieldName] = info } return b } // Remove specified property to HogeJSONBuilder. 
func (b *HogeJSONBuilder) Remove(info *HogePropertyInfo) *HogeJSONBuilder { delete(b._properties, info.fieldName) return b } // RemoveByJSONNames remove properties to HogeJSONBuilder by JSON property name. if name is not in the builder, it will ignore. func (b *HogeJSONBuilder) RemoveByJSONNames(names ...string) *HogeJSONBuilder { for _, name := range names { info := b._jsonPropertyMap[name] if info == nil { continue } delete(b._properties, info.fieldName) } return b } // RemoveByNames remove properties to HogeJSONBuilder by struct property name. if name is not in the builder, it will ignore. func (b *HogeJSONBuilder) RemoveByNames(names ...string) *HogeJSONBuilder { for _, name := range names { info := b._structPropertyMap[name] if info == nil {<|fim▁hole|> continue } delete(b._properties, info.fieldName) } return b } // Convert specified non-JSON object to JSON object. func (b *HogeJSONBuilder) Convert(orig *Hoge) (*HogeJSON, error) { if orig == nil { return nil, nil } ret := &HogeJSON{} for _, info := range b._properties { if err := info.Encoder(orig, ret); err != nil { return nil, err } } return ret, nil } // ConvertList specified non-JSON slice to JSONList. func (b *HogeJSONBuilder) ConvertList(orig []*Hoge) (HogeJSONList, error) { if orig == nil { return nil, nil } list := make(HogeJSONList, len(orig)) for idx, or := range orig { json, err := b.Convert(or) if err != nil { return nil, err } list[idx] = json } return list, nil } // Convert specified JSON object to non-JSON object. func (orig *HogeJSON) Convert() (*Hoge, error) { ret := &Hoge{} b := NewHogeJSONBuilder().AddAll() for _, info := range b._properties { if err := info.Decoder(orig, ret); err != nil { return nil, err } } return ret, nil } // Convert specified JSONList to non-JSON slice. func (jsonList HogeJSONList) Convert() ([]*Hoge, error) { orig := ([]*HogeJSON)(jsonList) list := make([]*Hoge, len(orig)) for idx, or := range orig { obj, err := or.Convert() if err != nil { return nil, err } list[idx] = obj } return list, nil } // Marshal non-JSON object to JSON string. func (b *HogeJSONBuilder) Marshal(orig *Hoge) ([]byte, error) { ret, err := b.Convert(orig) if err != nil { return nil, err } return json.Marshal(ret) } // FugaJSON is jsonized struct for Fuga. type FugaJSON struct { Fuga1 string `json:"fuga1,omitempty"` } // FugaJSONList is synonym about []*FugaJSON. type FugaJSONList []*FugaJSON // FugaPropertyEncoder is property encoder for [1]sJSON. type FugaPropertyEncoder func(src *Fuga, dest *FugaJSON) error // FugaPropertyDecoder is property decoder for [1]sJSON. type FugaPropertyDecoder func(src *FugaJSON, dest *Fuga) error // FugaPropertyInfo stores property information. type FugaPropertyInfo struct { fieldName string jsonName string Encoder FugaPropertyEncoder Decoder FugaPropertyDecoder } // FieldName returns struct field name of property. func (info *FugaPropertyInfo) FieldName() string { return info.fieldName } // JSONName returns json field name of property. func (info *FugaPropertyInfo) JSONName() string { return info.jsonName } // FugaJSONBuilder convert between Fuga to FugaJSON mutually. type FugaJSONBuilder struct { _properties map[string]*FugaPropertyInfo _jsonPropertyMap map[string]*FugaPropertyInfo _structPropertyMap map[string]*FugaPropertyInfo Fuga1 *FugaPropertyInfo } // NewFugaJSONBuilder make new FugaJSONBuilder. 
func NewFugaJSONBuilder() *FugaJSONBuilder { jb := &FugaJSONBuilder{ _properties: map[string]*FugaPropertyInfo{}, _jsonPropertyMap: map[string]*FugaPropertyInfo{}, _structPropertyMap: map[string]*FugaPropertyInfo{}, Fuga1: &FugaPropertyInfo{ fieldName: "Fuga1", jsonName: "fuga1", Encoder: func(src *Fuga, dest *FugaJSON) error { if src == nil { return nil } dest.Fuga1 = src.Fuga1 return nil }, Decoder: func(src *FugaJSON, dest *Fuga) error { if src == nil { return nil } dest.Fuga1 = src.Fuga1 return nil }, }, } jb._structPropertyMap["Fuga1"] = jb.Fuga1 jb._jsonPropertyMap["fuga1"] = jb.Fuga1 return jb } // Properties returns all properties on FugaJSONBuilder. func (b *FugaJSONBuilder) Properties() []*FugaPropertyInfo { return []*FugaPropertyInfo{ b.Fuga1, } } // AddAll adds all property to FugaJSONBuilder. func (b *FugaJSONBuilder) AddAll() *FugaJSONBuilder { b._properties["Fuga1"] = b.Fuga1 return b } // Add specified property to FugaJSONBuilder. func (b *FugaJSONBuilder) Add(info *FugaPropertyInfo) *FugaJSONBuilder { b._properties[info.fieldName] = info return b } // AddByJSONNames add properties to FugaJSONBuilder by JSON property name. if name is not in the builder, it will ignore. func (b *FugaJSONBuilder) AddByJSONNames(names ...string) *FugaJSONBuilder { for _, name := range names { info := b._jsonPropertyMap[name] if info == nil { continue } b._properties[info.fieldName] = info } return b } // AddByNames add properties to FugaJSONBuilder by struct property name. if name is not in the builder, it will ignore. func (b *FugaJSONBuilder) AddByNames(names ...string) *FugaJSONBuilder { for _, name := range names { info := b._structPropertyMap[name] if info == nil { continue } b._properties[info.fieldName] = info } return b } // Remove specified property to FugaJSONBuilder. func (b *FugaJSONBuilder) Remove(info *FugaPropertyInfo) *FugaJSONBuilder { delete(b._properties, info.fieldName) return b } // RemoveByJSONNames remove properties to FugaJSONBuilder by JSON property name. if name is not in the builder, it will ignore. func (b *FugaJSONBuilder) RemoveByJSONNames(names ...string) *FugaJSONBuilder { for _, name := range names { info := b._jsonPropertyMap[name] if info == nil { continue } delete(b._properties, info.fieldName) } return b } // RemoveByNames remove properties to FugaJSONBuilder by struct property name. if name is not in the builder, it will ignore. func (b *FugaJSONBuilder) RemoveByNames(names ...string) *FugaJSONBuilder { for _, name := range names { info := b._structPropertyMap[name] if info == nil { continue } delete(b._properties, info.fieldName) } return b } // Convert specified non-JSON object to JSON object. func (b *FugaJSONBuilder) Convert(orig *Fuga) (*FugaJSON, error) { if orig == nil { return nil, nil } ret := &FugaJSON{} for _, info := range b._properties { if err := info.Encoder(orig, ret); err != nil { return nil, err } } return ret, nil } // ConvertList specified non-JSON slice to JSONList. func (b *FugaJSONBuilder) ConvertList(orig []*Fuga) (FugaJSONList, error) { if orig == nil { return nil, nil } list := make(FugaJSONList, len(orig)) for idx, or := range orig { json, err := b.Convert(or) if err != nil { return nil, err } list[idx] = json } return list, nil } // Convert specified JSON object to non-JSON object. 
func (orig *FugaJSON) Convert() (*Fuga, error) { ret := &Fuga{} b := NewFugaJSONBuilder().AddAll() for _, info := range b._properties { if err := info.Decoder(orig, ret); err != nil { return nil, err } } return ret, nil } // Convert specified JSONList to non-JSON slice. func (jsonList FugaJSONList) Convert() ([]*Fuga, error) { orig := ([]*FugaJSON)(jsonList) list := make([]*Fuga, len(orig)) for idx, or := range orig { obj, err := or.Convert() if err != nil { return nil, err } list[idx] = obj } return list, nil } // Marshal non-JSON object to JSON string. func (b *FugaJSONBuilder) Marshal(orig *Fuga) ([]byte, error) { ret, err := b.Convert(orig) if err != nil { return nil, err } return json.Marshal(ret) }<|fim▁end|>
<|file_name|>pacman.rs<|end_file_name|><|fim▁begin|>use std::env; use std::ffi::OsString; use std::fs; use std::os::unix::fs::symlink; use std::path::Path; use std::process::{Command, Stdio}; use std::time::Duration; use crossbeam_channel::Sender; use regex::Regex; use serde_derive::Deserialize; use crate::blocks::{Block, ConfigBlock, Update}; use crate::config::SharedConfig; use crate::de::deserialize_duration; use crate::errors::*; use crate::formatting::value::Value; use crate::formatting::FormatTemplate; use crate::protocol::i3bar_event::{I3BarEvent, MouseButton}; use crate::scheduler::Task; use crate::util::has_command; use crate::widgets::text::TextWidget; use crate::widgets::{I3BarWidget, State}; pub struct Pacman { id: usize, output: TextWidget, update_interval: Duration, format: FormatTemplate, format_singular: FormatTemplate, format_up_to_date: FormatTemplate, warning_updates_regex: Option<Regex>, critical_updates_regex: Option<Regex>, watched: Watched, uptodate: bool, hide_when_uptodate: bool, } #[derive(Debug, PartialEq, Eq)] pub enum Watched { None, Pacman, /// cf `Pacman::aur_command` AUR(String), /// cf `Pacman::aur_command` Both(String), } #[derive(Deserialize, Debug, Clone)] #[serde(deny_unknown_fields, default)] pub struct PacmanConfig { /// Update interval in seconds #[serde(deserialize_with = "deserialize_duration")] pub interval: Duration, /// Format override pub format: FormatTemplate, /// Alternative format override for when exactly 1 update is available pub format_singular: FormatTemplate, /// Alternative format override for when no updates are available pub format_up_to_date: FormatTemplate, /// Indicate a `warning` state for the block if any pending update match the /// following regex. Default behaviour is that no package updates are deemed /// warning pub warning_updates_regex: Option<String>, /// Indicate a `critical` state for the block if any pending update match the following regex. /// Default behaviour is that no package updates are deemed critical pub critical_updates_regex: Option<String>, /// Optional AUR command, listing available updates pub aur_command: Option<String>, pub hide_when_uptodate: bool, } impl Default for PacmanConfig { fn default() -> Self { Self { interval: Duration::from_secs(600), format: FormatTemplate::default(), format_singular: FormatTemplate::default(), format_up_to_date: FormatTemplate::default(), warning_updates_regex: None, critical_updates_regex: None, aur_command: None, hide_when_uptodate: false, } } } impl PacmanConfig { fn watched( format: &FormatTemplate, format_singular: &FormatTemplate, format_up_to_date: &FormatTemplate, aur_command: Option<String>, ) -> Result<Watched> { macro_rules! 
any_format_contains { ($name:expr) => { format.contains($name) || format_singular.contains($name) || format_up_to_date.contains($name) }; } let aur = any_format_contains!("aur"); let pacman = any_format_contains!("pacman") || any_format_contains!("count"); let both = any_format_contains!("both"); if both || (pacman && aur) { let aur_command = aur_command.block_error( "pacman", "{aur} or {both} found in format string but no aur_command supplied", )?; Ok(Watched::Both(aur_command)) } else if pacman && !aur { Ok(Watched::Pacman) } else if !pacman && aur { let aur_command = aur_command.block_error( "pacman", "{aur} found in format string but no aur_command supplied", )?; Ok(Watched::AUR(aur_command)) } else { Ok(Watched::None) } } } impl ConfigBlock for Pacman { type Config = PacmanConfig; fn new( id: usize, block_config: Self::Config, shared_config: SharedConfig, _tx_update_request: Sender<Task>, ) -> Result<Self> { let output = TextWidget::new(id, 0, shared_config).with_icon("update")?; let fmt_normal = block_config.format.with_default("{pacman}")?; let fmt_singular = block_config.format_singular.with_default("{pacman}")?; let fmt_up_to_date = block_config.format_up_to_date.with_default("{pacman}")?; Ok(Pacman { id, update_interval: block_config.interval, output, warning_updates_regex: match block_config.warning_updates_regex { None => None, // no regex configured Some(regex_str) => { let regex = Regex::new(regex_str.as_ref()).map_err(|_| { ConfigurationError( "pacman".to_string(), "invalid warning updates regex".to_string(), ) })?; Some(regex) } }, critical_updates_regex: match block_config.critical_updates_regex { None => None, // no regex configured Some(regex_str) => { let regex = Regex::new(regex_str.as_ref()).map_err(|_| { ConfigurationError( "pacman".to_string(), "invalid critical updates regex".to_string(), ) })?; Some(regex) } }, watched: PacmanConfig::watched( &fmt_normal, &fmt_singular, &fmt_up_to_date, block_config.aur_command, )?, uptodate: false, hide_when_uptodate: block_config.hide_when_uptodate, format: fmt_normal, format_singular: fmt_singular, format_up_to_date: fmt_up_to_date, }) } } fn has_fake_root() -> Result<bool> { has_command("pacman", "fakeroot") } fn check_fakeroot_command_exists() -> Result<()> { if !has_fake_root()? 
{ Err(BlockError( "pacman".to_string(), "fakeroot not found".to_string(), )) } else { Ok(()) } } fn get_updates_db_dir() -> Result<String> { let tmp_dir = env::temp_dir() .into_os_string() .into_string() .block_error("pacman", "There's something wrong with your $TMP variable")?; let user = env::var_os("USER") .unwrap_or_else(|| OsString::from("")) .into_string() .block_error("pacman", "There's a problem with your $USER")?; env::var_os("CHECKUPDATES_DB") .unwrap_or_else(|| OsString::from(format!("{}/checkup-db-{}", tmp_dir, user))) .into_string() .block_error("pacman", "There's a problem with your $CHECKUPDATES_DB") } fn get_pacman_available_updates() -> Result<String> { let updates_db = get_updates_db_dir()?; // Determine pacman database path let db_path = env::var_os("DBPath") .map(Into::into) .unwrap_or_else(|| Path::new("/var/lib/pacman/").to_path_buf()); // Create the determined `checkup-db` path recursively fs::create_dir_all(&updates_db).block_error( "pacman", &format!("Failed to create checkup-db path '{}'", updates_db), )?; // Create symlink to local cache in `checkup-db` if required let local_cache = Path::new(&updates_db).join("local"); if !local_cache.exists() { symlink(db_path.join("local"), local_cache) .block_error("pacman", "Failed to created required symlink")?; } // Update database Command::new("sh") .env("LC_ALL", "C") .args(&[ "-c", &format!( "fakeroot -- pacman -Sy --dbpath \"{}\" --logfile /dev/null", updates_db ), ]) .stdout(Stdio::null()) .status() .block_error("pacman", "Failed to run command")?; // Get updates list String::from_utf8( Command::new("sh") .env("LC_ALL", "C") .args(&[ "-c", &format!("fakeroot pacman -Qu --dbpath \"{}\"", updates_db), ]) .output() .block_error("pacman", "There was a problem running the pacman commands")? .stdout, ) .block_error( "pacman", "There was a problem while converting the output of the pacman command to a string", ) } fn get_aur_available_updates(aur_command: &str) -> Result<String> { String::from_utf8( Command::new("sh") .args(&["-c", aur_command]) .output() .block_error("pacman", &format!("aur command: {} failed", aur_command))? 
.stdout, ) .block_error( "pacman", "There was a problem while converting the aur command output to a string", ) } fn get_update_count(updates: &str) -> usize { updates .lines() .filter(|line| !line.contains("[ignored]")) .count() } fn has_warning_update(updates: &str, regex: &Regex) -> bool { updates.lines().filter(|line| regex.is_match(line)).count() > 0 } fn has_critical_update(updates: &str, regex: &Regex) -> bool { updates.lines().filter(|line| regex.is_match(line)).count() > 0 } impl Block for Pacman { fn id(&self) -> usize { self.id } fn view(&self) -> Vec<&dyn I3BarWidget> { if self.uptodate && self.hide_when_uptodate { vec![] } else { vec![&self.output] } } fn update(&mut self) -> Result<Option<Update>> { let (formatting_map, warning, critical, cum_count) = match &self.watched { Watched::Pacman => { check_fakeroot_command_exists()?; let pacman_available_updates = get_pacman_available_updates()?; let pacman_count = get_update_count(&pacman_available_updates); let formatting_map = map!( "count" => Value::from_integer(pacman_count as i64), "pacman" => Value::from_integer(pacman_count as i64), ); let warning = self.warning_updates_regex.as_ref().map_or(false, |regex| { has_warning_update(&pacman_available_updates, regex) }); let critical = self.critical_updates_regex.as_ref().map_or(false, |regex| {<|fim▁hole|> has_critical_update(&pacman_available_updates, regex) }); (formatting_map, warning, critical, pacman_count) } Watched::AUR(aur_command) => { let aur_available_updates = get_aur_available_updates(aur_command)?; let aur_count = get_update_count(&aur_available_updates); let formatting_map = map!( "aur" => Value::from_integer(aur_count as i64) ); let warning = self.warning_updates_regex.as_ref().map_or(false, |regex| { has_warning_update(&aur_available_updates, regex) }); let critical = self.critical_updates_regex.as_ref().map_or(false, |regex| { has_critical_update(&aur_available_updates, regex) }); (formatting_map, warning, critical, aur_count) } Watched::Both(aur_command) => { check_fakeroot_command_exists()?; let pacman_available_updates = get_pacman_available_updates()?; let pacman_count = get_update_count(&pacman_available_updates); let aur_available_updates = get_aur_available_updates(aur_command)?; let aur_count = get_update_count(&aur_available_updates); let formatting_map = map!( "count" => Value::from_integer(pacman_count as i64), "pacman" => Value::from_integer(pacman_count as i64), "aur" => Value::from_integer(aur_count as i64), "both" => Value::from_integer((pacman_count + aur_count) as i64), ); let warning = self.warning_updates_regex.as_ref().map_or(false, |regex| { has_warning_update(&aur_available_updates, regex) || has_warning_update(&pacman_available_updates, regex) }); let critical = self.critical_updates_regex.as_ref().map_or(false, |regex| { has_critical_update(&aur_available_updates, regex) || has_critical_update(&pacman_available_updates, regex) }); (formatting_map, warning, critical, pacman_count + aur_count) } Watched::None => (std::collections::HashMap::new(), false, false, 0), }; self.output.set_texts(match cum_count { 0 => self.format_up_to_date.render(&formatting_map)?, 1 => self.format_singular.render(&formatting_map)?, _ => self.format.render(&formatting_map)?, }); self.output.set_state(match cum_count { 0 => State::Idle, _ => { if critical { State::Critical } else if warning { State::Warning } else { State::Info } } }); self.uptodate = cum_count == 0; Ok(Some(self.update_interval.into())) } fn click(&mut self, event: &I3BarEvent) -> Result<()> { if 
let MouseButton::Left = event.button { self.update()?; } Ok(()) } } #[cfg(test)] mod tests { use crate::blocks::pacman::{ get_aur_available_updates, get_update_count, PacmanConfig, Watched, }; use crate::formatting::FormatTemplate; #[test] fn test_get_update_count() { let no_update = ""; assert_eq!(get_update_count(no_update), 0); let two_updates_available = concat!( "systemd 245.4-2 -> 245.5-1\n", "systemd-libs 245.4-2 -> 245.5-1\n" ); assert_eq!(get_update_count(two_updates_available), 2); } #[test] fn test_watched() { let fmt_count = FormatTemplate::new("foo {count} bar", None).unwrap(); let fmt_pacman = FormatTemplate::new("foo {pacman} bar", None).unwrap(); let fmt_aur = FormatTemplate::new("foo {aur} bar", None).unwrap(); let fmt_pacman_aur = FormatTemplate::new("foo {pacman} {aur} bar", None).unwrap(); let fmt_both = FormatTemplate::new("foo {both} bar", None).unwrap(); let fmt_none = FormatTemplate::new("foo bar", None).unwrap(); let fmt_empty = FormatTemplate::new("", None).unwrap(); let watched = PacmanConfig::watched(&fmt_count, &fmt_count, &fmt_empty, None); assert!(watched.is_ok()); assert_eq!(watched.unwrap(), Watched::Pacman); let watched = PacmanConfig::watched(&fmt_pacman, &fmt_pacman, &fmt_empty, None); assert!(watched.is_ok()); assert_eq!(watched.unwrap(), Watched::Pacman); let watched = PacmanConfig::watched(&fmt_none, &fmt_none, &fmt_empty, None); assert!(watched.is_ok()); // missing formatter should not cause an error let watched = PacmanConfig::watched( &fmt_none, &fmt_none, &fmt_empty, Some("aur cmd".to_string()), ); assert!(watched.is_ok()); // missing formatter should not cause an error let watched = PacmanConfig::watched(&fmt_aur, &fmt_aur, &fmt_empty, Some("aur cmd".to_string())); assert!(watched.is_ok()); assert_eq!(watched.unwrap(), Watched::AUR("aur cmd".to_string())); let watched = PacmanConfig::watched( &fmt_pacman_aur, &fmt_pacman_aur, &fmt_empty, Some("aur cmd".to_string()), ); assert!(watched.is_ok()); assert_eq!(watched.unwrap(), Watched::Both("aur cmd".to_string())); let watched = PacmanConfig::watched(&fmt_pacman_aur, &fmt_pacman_aur, &fmt_empty, None); assert!(watched.is_err()); // missing aur command let watched = PacmanConfig::watched(&fmt_both, &fmt_both, &fmt_empty, None); assert!(watched.is_err()); // missing aur command let watched = PacmanConfig::watched( &fmt_both, &fmt_both, &fmt_empty, Some("aur cmd".to_string()), ); assert!(watched.is_ok()); assert_eq!(watched.unwrap(), Watched::Both("aur cmd".to_string())); } #[test] fn test_get_aur_available_updates() { // aur_command should behave as echo -ne "foo x.x -> y.y\n" let updates = "foo x.x -> y.y\nbar x.x -> y.y\n"; let aur_command = format!("printf '{}'", updates); let available_updates = get_aur_available_updates(&aur_command); assert!(available_updates.is_ok()); assert_eq!(available_updates.unwrap(), updates); } }<|fim▁end|>
<|file_name|>runtest.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use self::TargetLocation::*; use common::Config; use common::{CompileFail, Pretty, RunFail, RunPass, RunPassValgrind, DebugInfoGdb}; use common::{Codegen, DebugInfoLldb}; use errors; use header::TestProps; use header; use procsrv; use util::logv; #[cfg(target_os = "windows")] use util; #[cfg(target_os = "windows")] use std::ascii::AsciiExt; use std::old_io::File; use std::old_io::fs::PathExtensions; use std::old_io::fs; use std::old_io::net::tcp; use std::old_io::process::ProcessExit; use std::old_io::process; use std::old_io::timer; use std::old_io; use std::env; use std::iter::repeat; use std::str; use std::string::String; use std::thread; use std::time::Duration; use test::MetricMap; pub fn run(config: Config, testfile: String) { match &*config.target { "arm-linux-androideabi" | "aarch64-linux-android" => { if !config.adb_device_status { panic!("android device not available"); } } _=> { } } let mut _mm = MetricMap::new(); run_metrics(config, testfile, &mut _mm); } pub fn run_metrics(config: Config, testfile: String, mm: &mut MetricMap) { if config.verbose { // We're going to be dumping a lot of info. Start on a new line. print!("\n\n"); } let testfile = Path::new(testfile); debug!("running {:?}", testfile.display()); let props = header::load_props(&testfile); debug!("loaded props"); match config.mode { CompileFail => run_cfail_test(&config, &props, &testfile), RunFail => run_rfail_test(&config, &props, &testfile), RunPass => run_rpass_test(&config, &props, &testfile), RunPassValgrind => run_valgrind_test(&config, &props, &testfile), Pretty => run_pretty_test(&config, &props, &testfile), DebugInfoGdb => run_debuginfo_gdb_test(&config, &props, &testfile), DebugInfoLldb => run_debuginfo_lldb_test(&config, &props, &testfile), Codegen => run_codegen_test(&config, &props, &testfile, mm), } } fn get_output(props: &TestProps, proc_res: &ProcRes) -> String { if props.check_stdout { format!("{}{}", proc_res.stdout, proc_res.stderr) } else { proc_res.stderr.clone() } } fn run_cfail_test(config: &Config, props: &TestProps, testfile: &Path) { let proc_res = compile_test(config, props, testfile); if proc_res.status.success() { fatal_proc_rec("compile-fail test compiled successfully!", &proc_res); } check_correct_failure_status(&proc_res); if proc_res.status.success() { fatal("process did not return an error status"); } let output_to_check = get_output(props, &proc_res); let expected_errors = errors::load_errors(testfile); if !expected_errors.is_empty() { if !props.error_patterns.is_empty() { fatal("both error pattern and expected errors specified"); } check_expected_errors(expected_errors, testfile, &proc_res); } else { check_error_patterns(props, testfile, &output_to_check, &proc_res); } check_no_compiler_crash(&proc_res); check_forbid_output(props, &output_to_check, &proc_res); } fn run_rfail_test(config: &Config, props: &TestProps, testfile: &Path) { let proc_res = if !config.jit { let proc_res = compile_test(config, props, testfile); if !proc_res.status.success() { 
fatal_proc_rec("compilation failed!", &proc_res); } exec_compiled_test(config, props, testfile) } else { jit_test(config, props, testfile) }; // The value our Makefile configures valgrind to return on failure static VALGRIND_ERR: int = 100; if proc_res.status.matches_exit_status(VALGRIND_ERR) { fatal_proc_rec("run-fail test isn't valgrind-clean!", &proc_res); } let output_to_check = get_output(props, &proc_res); check_correct_failure_status(&proc_res); check_error_patterns(props, testfile, &output_to_check, &proc_res); } fn check_correct_failure_status(proc_res: &ProcRes) { // The value the rust runtime returns on failure static RUST_ERR: int = 101; if !proc_res.status.matches_exit_status(RUST_ERR) { fatal_proc_rec( &format!("failure produced the wrong error: {:?}", proc_res.status), proc_res); } } fn run_rpass_test(config: &Config, props: &TestProps, testfile: &Path) { if !config.jit { let mut proc_res = compile_test(config, props, testfile); if !proc_res.status.success() { fatal_proc_rec("compilation failed!", &proc_res); } proc_res = exec_compiled_test(config, props, testfile); if !proc_res.status.success() { fatal_proc_rec("test run failed!", &proc_res); } } else { let proc_res = jit_test(config, props, testfile); if !proc_res.status.success() { fatal_proc_rec("jit failed!", &proc_res); } } } fn run_valgrind_test(config: &Config, props: &TestProps, testfile: &Path) { if config.valgrind_path.is_none() { assert!(!config.force_valgrind); return run_rpass_test(config, props, testfile); } let mut proc_res = compile_test(config, props, testfile); if !proc_res.status.success() { fatal_proc_rec("compilation failed!", &proc_res); } let mut new_config = config.clone(); new_config.runtool = new_config.valgrind_path.clone(); proc_res = exec_compiled_test(&new_config, props, testfile); if !proc_res.status.success() { fatal_proc_rec("test run failed!", &proc_res); } } fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) { if props.pp_exact.is_some() { logv(config, "testing for exact pretty-printing".to_string()); } else { logv(config, "testing for converging pretty-printing".to_string()); } let rounds = match props.pp_exact { Some(_) => 1, None => 2 }; let src = File::open(testfile).read_to_end().unwrap(); let src = String::from_utf8(src.clone()).unwrap(); let mut srcs = vec!(src); let mut round = 0; while round < rounds { logv(config, format!("pretty-printing round {}", round)); let proc_res = print_source(config, props, testfile, srcs[round].to_string(), &props.pretty_mode); if !proc_res.status.success() { fatal_proc_rec(&format!("pretty-printing failed in round {}", round), &proc_res); } let ProcRes{ stdout, .. 
} = proc_res; srcs.push(stdout); round += 1; } let mut expected = match props.pp_exact { Some(ref file) => { let filepath = testfile.dir_path().join(file); let s = File::open(&filepath).read_to_end().unwrap(); String::from_utf8(s).unwrap() } None => { srcs[srcs.len() - 2].clone() } }; let mut actual = srcs[srcs.len() - 1].clone(); if props.pp_exact.is_some() { // Now we have to care about line endings let cr = "\r".to_string(); actual = actual.replace(&cr, "").to_string(); expected = expected.replace(&cr, "").to_string(); } compare_source(&expected, &actual); // If we're only making sure that the output matches then just stop here if props.pretty_compare_only { return; } // Finally, let's make sure it actually appears to remain valid code let proc_res = typecheck_source(config, props, testfile, actual); if !proc_res.status.success() { fatal_proc_rec("pretty-printed source does not typecheck", &proc_res); } if props.no_pretty_expanded { return } // additionally, run `--pretty expanded` and try to build it. let proc_res = print_source(config, props, testfile, srcs[round].clone(), "expanded"); if !proc_res.status.success() { fatal_proc_rec("pretty-printing (expanded) failed", &proc_res); } let ProcRes{ stdout: expanded_src, .. } = proc_res; let proc_res = typecheck_source(config, props, testfile, expanded_src); if !proc_res.status.success() { fatal_proc_rec("pretty-printed source (expanded) does not typecheck", &proc_res); } return; fn print_source(config: &Config, props: &TestProps, testfile: &Path, src: String, pretty_type: &str) -> ProcRes { let aux_dir = aux_output_dir_name(config, testfile); compose_and_run(config, testfile, make_pp_args(config, props, testfile, pretty_type.to_string()), props.exec_env.clone(), &config.compile_lib_path, Some(aux_dir.as_str().unwrap()), Some(src)) } fn make_pp_args(config: &Config, props: &TestProps, testfile: &Path, pretty_type: String) -> ProcArgs { let aux_dir = aux_output_dir_name(config, testfile); // FIXME (#9639): This needs to handle non-utf8 paths let mut args = vec!("-".to_string(), "-Zunstable-options".to_string(), "--pretty".to_string(), pretty_type, format!("--target={}", config.target), "-L".to_string(), aux_dir.as_str().unwrap().to_string()); args.extend(split_maybe_args(&config.target_rustcflags).into_iter()); args.extend(split_maybe_args(&props.compile_flags).into_iter()); return ProcArgs { prog: config.rustc_path.as_str().unwrap().to_string(), args: args, }; } fn compare_source(expected: &str, actual: &str) { if expected != actual { error("pretty-printed source does not match expected source"); println!("\n\ expected:\n\ ------------------------------------------\n\ {}\n\ ------------------------------------------\n\ actual:\n\ ------------------------------------------\n\ {}\n\ ------------------------------------------\n\ \n", expected, actual); panic!(); } } fn typecheck_source(config: &Config, props: &TestProps, testfile: &Path, src: String) -> ProcRes { let args = make_typecheck_args(config, props, testfile); compose_and_run_compiler(config, props, testfile, args, Some(src)) } fn make_typecheck_args(config: &Config, props: &TestProps, testfile: &Path) -> ProcArgs { let aux_dir = aux_output_dir_name(config, testfile); let target = if props.force_host { &*config.host } else { &*config.target }; // FIXME (#9639): This needs to handle non-utf8 paths let mut args = vec!("-".to_string(), "-Zno-trans".to_string(), "--crate-type=lib".to_string(), format!("--target={}", target), "-L".to_string(), 
config.build_base.as_str().unwrap().to_string(), "-L".to_string(), aux_dir.as_str().unwrap().to_string()); args.extend(split_maybe_args(&config.target_rustcflags).into_iter()); args.extend(split_maybe_args(&props.compile_flags).into_iter()); // FIXME (#9639): This needs to handle non-utf8 paths return ProcArgs { prog: config.rustc_path.as_str().unwrap().to_string(), args: args, }; } } fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) { let mut config = Config { target_rustcflags: cleanup_debug_info_options(&config.target_rustcflags), host_rustcflags: cleanup_debug_info_options(&config.host_rustcflags), .. config.clone() }; let config = &mut config; let DebuggerCommands { commands, check_lines, breakpoint_lines } = parse_debugger_commands(testfile, "gdb"); let mut cmds = commands.connect("\n"); // compile test file (it should have 'compile-flags:-g' in the header) let compiler_run_result = compile_test(config, props, testfile); if !compiler_run_result.status.success() { fatal_proc_rec("compilation failed!", &compiler_run_result); } let exe_file = make_exe_name(config, testfile); let debugger_run_result; match &*config.target { "arm-linux-androideabi" | "aarch64-linux-android" => { cmds = cmds.replace("run", "continue"); // write debugger script let mut script_str = String::with_capacity(2048); script_str.push_str("set charset UTF-8\n"); script_str.push_str(&format!("file {}\n", exe_file.as_str().unwrap())); script_str.push_str("target remote :5039\n"); script_str.push_str(&format!("set solib-search-path \ ./{}/stage2/lib/rustlib/{}/lib/\n", config.host, config.target)); for line in breakpoint_lines.iter() { script_str.push_str(&format!("break {:?}:{}\n", testfile.filename_display(), *line)[]); } script_str.push_str(&cmds); script_str.push_str("quit\n"); debug!("script_str = {}", script_str); dump_output_file(config, testfile, &script_str, "debugger.script"); procsrv::run("", &config.adb_path, None, &[ "push".to_string(), exe_file.as_str().unwrap().to_string(), config.adb_test_dir.clone() ], vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{:?}`", config.adb_path)); procsrv::run("", &config.adb_path, None, &[ "forward".to_string(), "tcp:5039".to_string(), "tcp:5039".to_string() ], vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{:?}`", config.adb_path)); let adb_arg = format!("export LD_LIBRARY_PATH={}; \ gdbserver{} :5039 {}/{}", config.adb_test_dir.clone(), if config.target.contains("aarch64") {"64"} else {""}, config.adb_test_dir.clone(), str::from_utf8( exe_file.filename() .unwrap()).unwrap()); let mut process = procsrv::run_background("", &config.adb_path , None, &[ "shell".to_string(), adb_arg.clone() ], vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{:?}`", config.adb_path)); loop { //waiting 1 second for gdbserver start timer::sleep(Duration::milliseconds(1000)); let result = thread::spawn(move || { tcp::TcpStream::connect("127.0.0.1:5039").unwrap(); }).join(); if result.is_err() { continue; } break; } let tool_path = match config.android_cross_path.as_str() { Some(x) => x.to_string(), None => fatal("cannot find android cross path") }; let debugger_script = make_out_name(config, testfile, "debugger.script"); // FIXME (#9639): This needs to handle non-utf8 paths let debugger_opts = vec!("-quiet".to_string(), "-batch".to_string(), "-nx".to_string(), format!("-command={}", debugger_script.as_str().unwrap())); let mut 
gdb_path = tool_path; gdb_path.push_str(&format!("/bin/{}-gdb", config.target)); let procsrv::Result { out, err, status } = procsrv::run("", &gdb_path, None, &debugger_opts, vec!(("".to_string(), "".to_string())), None) .expect(&format!("failed to exec `{:?}`", gdb_path)); let cmdline = { let cmdline = make_cmdline("", &format!("{}-gdb", config.target), &debugger_opts); logv(config, format!("executing {}", cmdline)); cmdline }; debugger_run_result = ProcRes { status: status, stdout: out, stderr: err, cmdline: cmdline }; if process.signal_kill().is_err() { println!("Adb process is already finished."); } } _=> { let rust_src_root = find_rust_src_root(config) .expect("Could not find Rust source root"); let rust_pp_module_rel_path = Path::new("./src/etc"); let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path) .as_str() .unwrap() .to_string(); // write debugger script let mut script_str = String::with_capacity(2048); script_str.push_str("set charset UTF-8\n"); script_str.push_str("show version\n"); match config.gdb_version { Some(ref version) => { println!("NOTE: compiletest thinks it is using GDB version {}", version); if header::gdb_version_to_int(version) > header::gdb_version_to_int("7.4") { // Add the directory containing the pretty printers to // GDB's script auto loading safe path script_str.push_str( &format!("add-auto-load-safe-path {}\n", rust_pp_module_abs_path.replace("\\", "\\\\")) ); } } _ => { println!("NOTE: compiletest does not know which version of \ GDB it is using"); } } // The following line actually doesn't have to do anything with // pretty printing, it just tells GDB to print values on one line: script_str.push_str("set print pretty off\n"); // Add the pretty printer directory to GDB's source-file search path script_str.push_str(&format!("directory {}\n", rust_pp_module_abs_path)[]); // Load the target executable script_str.push_str(&format!("file {}\n", exe_file.as_str().unwrap().replace("\\", "\\\\"))[]); // Add line breakpoints for line in &breakpoint_lines { script_str.push_str(&format!("break '{}':{}\n", testfile.filename_display(), *line)[]); } script_str.push_str(&cmds); script_str.push_str("quit\n"); debug!("script_str = {}", script_str); dump_output_file(config, testfile, &script_str, "debugger.script"); // run debugger script with gdb #[cfg(windows)] fn debugger() -> String { "gdb.exe".to_string() } #[cfg(unix)] fn debugger() -> String { "gdb".to_string() } let debugger_script = make_out_name(config, testfile, "debugger.script"); // FIXME (#9639): This needs to handle non-utf8 paths let debugger_opts = vec!("-quiet".to_string(), "-batch".to_string(), "-nx".to_string(), format!("-command={}", debugger_script.as_str().unwrap())); let proc_args = ProcArgs { prog: debugger(), args: debugger_opts, }; let environment = vec![("PYTHONPATH".to_string(), rust_pp_module_abs_path)]; debugger_run_result = compose_and_run(config, testfile, proc_args, environment, &config.run_lib_path, None, None); } } if !debugger_run_result.status.success() { fatal("gdb failed to execute"); } check_debugger_output(&debugger_run_result, &check_lines); } fn find_rust_src_root(config: &Config) -> Option<Path> { let mut path = config.src_base.clone(); let path_postfix = Path::new("src/etc/lldb_batchmode.py"); while path.pop() { if path.join(&path_postfix).is_file() { return Some(path); } } return None; } fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path) { use std::old_io::process::{Command, ProcessOutput}; if config.lldb_python_dir.is_none() { 
fatal("Can't run LLDB test because LLDB's python path is not set."); } let mut config = Config { target_rustcflags: cleanup_debug_info_options(&config.target_rustcflags), host_rustcflags: cleanup_debug_info_options(&config.host_rustcflags), .. config.clone() }; let config = &mut config; // compile test file (it should have 'compile-flags:-g' in the header) let compile_result = compile_test(config, props, testfile); if !compile_result.status.success() { fatal_proc_rec("compilation failed!", &compile_result); } let exe_file = make_exe_name(config, testfile); match config.lldb_version { Some(ref version) => { println!("NOTE: compiletest thinks it is using LLDB version {}", version); } _ => { println!("NOTE: compiletest does not know which version of \ LLDB it is using"); } } // Parse debugger commands etc from test files let DebuggerCommands { commands, check_lines, breakpoint_lines, .. } = parse_debugger_commands(testfile, "lldb"); // Write debugger script: // We don't want to hang when calling `quit` while the process is still running let mut script_str = String::from_str("settings set auto-confirm true\n"); // Make LLDB emit its version, so we have it documented in the test output script_str.push_str("version\n"); // Switch LLDB into "Rust mode" let rust_src_root = find_rust_src_root(config) .expect("Could not find Rust source root"); let rust_pp_module_rel_path = Path::new("./src/etc/lldb_rust_formatters.py"); let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path) .as_str() .unwrap() .to_string(); script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[])[]); script_str.push_str("type summary add --no-value "); script_str.push_str("--python-function lldb_rust_formatters.print_val "); script_str.push_str("-x \".*\" --category Rust\n"); script_str.push_str("type category enable Rust\n"); // Set breakpoints on every line that contains the string "#break" for line in &breakpoint_lines { script_str.push_str(&format!("breakpoint set --line {}\n", line)); } // Append the other commands for line in &commands { script_str.push_str(line); script_str.push_str("\n"); } // Finally, quit the debugger script_str.push_str("quit\n"); // Write the script into a file debug!("script_str = {}", script_str); dump_output_file(config, testfile, &script_str, "debugger.script"); let debugger_script = make_out_name(config, testfile, "debugger.script"); // Let LLDB execute the script via lldb_batchmode.py let debugger_run_result = run_lldb(config, &exe_file, &debugger_script, &rust_src_root); if !debugger_run_result.status.success() { fatal_proc_rec("Error while running LLDB", &debugger_run_result); } check_debugger_output(&debugger_run_result, &check_lines); fn run_lldb(config: &Config, test_executable: &Path, debugger_script: &Path, rust_src_root: &Path) -> ProcRes { // Prepare the lldb_batchmode which executes the debugger script let lldb_script_path = rust_src_root.join(Path::new("./src/etc/lldb_batchmode.py")); let mut cmd = Command::new("python"); cmd.arg(lldb_script_path) .arg(test_executable) .arg(debugger_script) .env_set_all(&[("PYTHONPATH", config.lldb_python_dir.clone().unwrap())]); let (status, out, err) = match cmd.spawn() { Ok(process) => { let ProcessOutput { status, output, error } = process.wait_with_output().unwrap(); (status, String::from_utf8(output).unwrap(), String::from_utf8(error).unwrap()) }, Err(e) => { fatal(&format!("Failed to setup Python process for \ LLDB script: {}", e)) } }; dump_output(config, test_executable, &out, &err); return 
ProcRes { status: status, stdout: out, stderr: err, cmdline: format!("{:?}", cmd) }; } } struct DebuggerCommands { commands: Vec<String>, check_lines: Vec<String>, breakpoint_lines: Vec<uint>, } fn parse_debugger_commands(file_path: &Path, debugger_prefix: &str) -> DebuggerCommands { use std::old_io::{BufferedReader, File}; let command_directive = format!("{}-command", debugger_prefix); let check_directive = format!("{}-check", debugger_prefix); let mut breakpoint_lines = vec!(); let mut commands = vec!(); let mut check_lines = vec!(); let mut counter = 1; let mut reader = BufferedReader::new(File::open(file_path).unwrap()); for line in reader.lines() { match line { Ok(line) => { if line.contains("#break") { breakpoint_lines.push(counter); } header::parse_name_value_directive( &line, &command_directive).map(|cmd| { commands.push(cmd) }); header::parse_name_value_directive( &line, &check_directive).map(|cmd| { check_lines.push(cmd) }); } Err(e) => { fatal(&format!("Error while parsing debugger commands: {}", e)) } } counter += 1; } DebuggerCommands { commands: commands, check_lines: check_lines, breakpoint_lines: breakpoint_lines, } } fn cleanup_debug_info_options(options: &Option<String>) -> Option<String> { if options.is_none() { return None; } // Remove options that are either unwanted (-O) or may lead to duplicates due to RUSTFLAGS. let options_to_remove = [ "-O".to_string(), "-g".to_string(), "--debuginfo".to_string() ]; let new_options = split_maybe_args(options).into_iter() .filter(|x| !options_to_remove.contains(x)) .collect::<Vec<String>>() .connect(" "); Some(new_options) } fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String]) { let num_check_lines = check_lines.len(); if num_check_lines > 0 { // Allow check lines to leave parts unspecified (e.g., uninitialized // bits in the wrong case of an enum) with the notation "[...]". 
let check_fragments: Vec<Vec<String>> = check_lines.iter().map(|s| { s .trim() .split_str("[...]") .map(|x| x.to_string()) .collect() }).collect(); // check if each line in props.check_lines appears in the // output (in order) let mut i = 0; for line in debugger_run_result.stdout.lines() { let mut rest = line.trim(); let mut first = true; let mut failed = false; for frag in &check_fragments[i] { let found = if first { if rest.starts_with(frag) { Some(0) } else { None } } else { rest.find_str(frag) }; match found { None => { failed = true; break; } Some(i) => { rest = &rest[(i + frag.len())..]; } } first = false; } if !failed && rest.len() == 0 { i += 1; } if i == num_check_lines { // all lines checked break; } } if i != num_check_lines { fatal_proc_rec(&format!("line not found in debugger output: {}", check_lines.get(i).unwrap()), debugger_run_result); } } } fn check_error_patterns(props: &TestProps, testfile: &Path, output_to_check: &str, proc_res: &ProcRes) { if props.error_patterns.is_empty() { fatal(&format!("no error pattern specified in {:?}", testfile.display())); } let mut next_err_idx = 0; let mut next_err_pat = &props.error_patterns[next_err_idx]; let mut done = false; for line in output_to_check.lines() { if line.contains(next_err_pat) { debug!("found error pattern {}", next_err_pat); next_err_idx += 1; if next_err_idx == props.error_patterns.len() { debug!("found all error patterns"); done = true; break; } next_err_pat = &props.error_patterns[next_err_idx]; } } if done { return; } let missing_patterns = &props.error_patterns[next_err_idx..]; if missing_patterns.len() == 1 { fatal_proc_rec(&format!("error pattern '{}' not found!", missing_patterns[0]), proc_res); } else { for pattern in missing_patterns { error(&format!("error pattern '{}' not found!", *pattern)); } fatal_proc_rec("multiple error patterns not found", proc_res); } } fn check_no_compiler_crash(proc_res: &ProcRes) { for line in proc_res.stderr.lines() { if line.starts_with("error: internal compiler error:") { fatal_proc_rec("compiler encountered internal error", proc_res); } } } fn check_forbid_output(props: &TestProps, output_to_check: &str, proc_res: &ProcRes) { for pat in &props.forbid_output { if output_to_check.contains(pat) { fatal_proc_rec("forbidden pattern found in compiler output", proc_res); } } } fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> , testfile: &Path, proc_res: &ProcRes) { // true if we found the error in question let mut found_flags: Vec<_> = repeat(false).take(expected_errors.len()).collect(); if proc_res.status.success() { fatal("process did not return an error status"); } let prefixes = expected_errors.iter().map(|ee| { format!("{}:{}:", testfile.display(), ee.line) }).collect::<Vec<String> >(); #[cfg(windows)] fn prefix_matches( line : &str, prefix : &str ) -> bool { line.to_ascii_lowercase().starts_with(&prefix.to_ascii_lowercase()) } #[cfg(unix)] fn prefix_matches( line : &str, prefix : &str ) -> bool { line.starts_with( prefix ) } // A multi-line error will have followup lines which will always // start with one of these strings. 
fn continuation( line: &str) -> bool { line.starts_with(" expected") || line.starts_with(" found") || // 1234 // Should have 4 spaces: see issue 18946 line.starts_with("(") } // Scan and extract our error/warning messages, // which look like: // filename:line1:col1: line2:col2: *error:* msg // filename:line1:col1: line2:col2: *warning:* msg // where line1:col1: is the starting point, line2:col2: // is the ending point, and * represents ANSI color codes. for line in proc_res.stderr.lines() { let mut was_expected = false; for (i, ee) in expected_errors.iter().enumerate() { if !found_flags[i] { debug!("prefix={} ee.kind={} ee.msg={} line={}", prefixes[i], ee.kind, ee.msg, line); if (prefix_matches(line, &prefixes[i]) || continuation(line)) && line.contains(&ee.kind) && line.contains(&ee.msg) { found_flags[i] = true; was_expected = true; break; } } } // ignore this msg which gets printed at the end if line.contains("aborting due to") { was_expected = true; } if !was_expected && is_compiler_error_or_warning(line) { fatal_proc_rec(&format!("unexpected compiler error or warning: '{}'", line), proc_res); } } for (i, &flag) in found_flags.iter().enumerate() { if !flag { let ee = &expected_errors[i]; fatal_proc_rec(&format!("expected {} on line {} not found: {}", ee.kind, ee.line, ee.msg), proc_res); } } } fn is_compiler_error_or_warning(line: &str) -> bool { let mut i = 0; return scan_until_char(line, ':', &mut i) && scan_char(line, ':', &mut i) && scan_integer(line, &mut i) && scan_char(line, ':', &mut i) && scan_integer(line, &mut i) && scan_char(line, ':', &mut i) && scan_char(line, ' ', &mut i) && scan_integer(line, &mut i) && scan_char(line, ':', &mut i) && scan_integer(line, &mut i) && scan_char(line, ' ', &mut i) && (scan_string(line, "error", &mut i) || scan_string(line, "warning", &mut i)); } fn scan_until_char(haystack: &str, needle: char, idx: &mut uint) -> bool { if *idx >= haystack.len() { return false; } let opt = haystack[(*idx)..].find(needle); if opt.is_none() { return false; } *idx = opt.unwrap(); return true; } fn scan_char(haystack: &str, needle: char, idx: &mut uint) -> bool { if *idx >= haystack.len() { return false; } let range = haystack.char_range_at(*idx); if range.ch != needle { return false; } *idx = range.next; return true; } fn scan_integer(haystack: &str, idx: &mut uint) -> bool { let mut i = *idx; while i < haystack.len() { let range = haystack.char_range_at(i); if range.ch < '0' || '9' < range.ch { break; } i = range.next; } if i == *idx { return false; } *idx = i; return true; } fn scan_string(haystack: &str, needle: &str, idx: &mut uint) -> bool { let mut haystack_i = *idx; let mut needle_i = 0; while needle_i < needle.len() { if haystack_i >= haystack.len() { return false; } let range = haystack.char_range_at(haystack_i); haystack_i = range.next; if !scan_char(needle, range.ch, &mut needle_i) { return false; } } *idx = haystack_i; return true; } struct ProcArgs { prog: String, args: Vec<String>, } struct ProcRes { status: ProcessExit, stdout: String, stderr: String, cmdline: String, } fn compile_test(config: &Config, props: &TestProps, testfile: &Path) -> ProcRes { compile_test_(config, props, testfile, &[]) } fn jit_test(config: &Config, props: &TestProps, testfile: &Path) -> ProcRes { compile_test_(config, props, testfile, &["--jit".to_string()]) } fn compile_test_(config: &Config, props: &TestProps, testfile: &Path, extra_args: &[String]) -> ProcRes { let aux_dir = aux_output_dir_name(config, testfile); // FIXME (#9639): This needs to handle non-utf8 paths let 
mut link_args = vec!("-L".to_string(), aux_dir.as_str().unwrap().to_string()); link_args.extend(extra_args.iter().map(|s| s.clone())); let args = make_compile_args(config, props, link_args, |a, b| TargetLocation::ThisFile(make_exe_name(a, b)), testfile); compose_and_run_compiler(config, props, testfile, args, None) } fn exec_compiled_test(config: &Config, props: &TestProps, testfile: &Path) -> ProcRes { let env = props.exec_env.clone(); match &*config.target { "arm-linux-androideabi" | "aarch64-linux-android" => { _arm_exec_compiled_test(config, props, testfile, env) } _=> { let aux_dir = aux_output_dir_name(config, testfile); compose_and_run(config, testfile, make_run_args(config, props, testfile), env, &config.run_lib_path, Some(aux_dir.as_str().unwrap()), None) } } } fn compose_and_run_compiler( config: &Config, props: &TestProps, testfile: &Path, args: ProcArgs, input: Option<String>) -> ProcRes { if !props.aux_builds.is_empty() { ensure_dir(&aux_output_dir_name(config, testfile)); } let aux_dir = aux_output_dir_name(config, testfile); // FIXME (#9639): This needs to handle non-utf8 paths let extra_link_args = vec!("-L".to_string(), aux_dir.as_str().unwrap().to_string()); for rel_ab in &props.aux_builds { let abs_ab = config.aux_base.join(rel_ab); let aux_props = header::load_props(&abs_ab); let mut crate_type = if aux_props.no_prefer_dynamic { Vec::new() } else { vec!("--crate-type=dylib".to_string()) }; crate_type.extend(extra_link_args.clone().into_iter()); let aux_args = make_compile_args(config, &aux_props, crate_type, |a,b| { let f = make_lib_name(a, b, testfile); TargetLocation::ThisDirectory(f.dir_path()) }, &abs_ab); let auxres = compose_and_run(config, &abs_ab, aux_args, Vec::new(), &config.compile_lib_path, Some(aux_dir.as_str().unwrap()), None); if !auxres.status.success() { fatal_proc_rec( &format!("auxiliary build of {:?} failed to compile: ", abs_ab.display()), &auxres); } match &*config.target { "arm-linux-androideabi" | "aarch64-linux-android" => { _arm_push_aux_shared_library(config, testfile); } _ => {} } } compose_and_run(config, testfile, args, Vec::new(), &config.compile_lib_path, Some(aux_dir.as_str().unwrap()), input) } fn ensure_dir(path: &Path) { if path.is_dir() { return; } fs::mkdir(path, old_io::USER_RWX).unwrap(); } fn compose_and_run(config: &Config, testfile: &Path, ProcArgs{ args, prog }: ProcArgs, procenv: Vec<(String, String)> , lib_path: &str, aux_path: Option<&str>, input: Option<String>) -> ProcRes { return program_output(config, testfile, lib_path, prog, aux_path, args, procenv, input); } enum TargetLocation { ThisFile(Path), ThisDirectory(Path), } fn make_compile_args<F>(config: &Config, props: &TestProps, extras: Vec<String> , xform: F, testfile: &Path) -> ProcArgs where F: FnOnce(&Config, &Path) -> TargetLocation, { let xform_file = xform(config, testfile); let target = if props.force_host { &*config.host } else { &*config.target }; // FIXME (#9639): This needs to handle non-utf8 paths let mut args = vec!(testfile.as_str().unwrap().to_string(), "-L".to_string(), config.build_base.as_str().unwrap().to_string(), format!("--target={}", target)); args.push_all(&extras); if !props.no_prefer_dynamic { args.push("-C".to_string()); args.push("prefer-dynamic".to_string()); } let path = match xform_file { TargetLocation::ThisFile(path) => { args.push("-o".to_string()); path } TargetLocation::ThisDirectory(path) => { args.push("--out-dir".to_string()); path } }; args.push(path.as_str().unwrap().to_string()); if props.force_host { 
args.extend(split_maybe_args(&config.host_rustcflags).into_iter()); } else { args.extend(split_maybe_args(&config.target_rustcflags).into_iter()); } args.extend(split_maybe_args(&props.compile_flags).into_iter()); return ProcArgs { prog: config.rustc_path.as_str().unwrap().to_string(), args: args, }; } fn make_lib_name(config: &Config, auxfile: &Path, testfile: &Path) -> Path { // what we return here is not particularly important, as it // happens; rustc ignores everything except for the directory. let auxname = output_testname(auxfile); aux_output_dir_name(config, testfile).join(&auxname) } fn make_exe_name(config: &Config, testfile: &Path) -> Path { let mut f = output_base_name(config, testfile); if !env::consts::EXE_SUFFIX.is_empty() { let mut fname = f.filename().unwrap().to_vec(); fname.extend(env::consts::EXE_SUFFIX.bytes()); f.set_filename(fname); } f } fn make_run_args(config: &Config, props: &TestProps, testfile: &Path) -> ProcArgs { // If we've got another tool to run under (valgrind), // then split apart its command let mut args = split_maybe_args(&config.runtool); let exe_file = make_exe_name(config, testfile); // FIXME (#9639): This needs to handle non-utf8 paths args.push(exe_file.as_str().unwrap().to_string()); // Add the arguments in the run_flags directive args.extend(split_maybe_args(&props.run_flags).into_iter()); let prog = args.remove(0); return ProcArgs { prog: prog, args: args, }; } fn split_maybe_args(argstr: &Option<String>) -> Vec<String> { match *argstr { Some(ref s) => { s .split(' ') .filter_map(|s| { if s.chars().all(|c| c.is_whitespace()) { None } else { Some(s.to_string()) } }).collect() } None => Vec::new() } } fn program_output(config: &Config, testfile: &Path, lib_path: &str, prog: String, aux_path: Option<&str>, args: Vec<String>, env: Vec<(String, String)>, input: Option<String>) -> ProcRes { let cmdline = { let cmdline = make_cmdline(lib_path, &prog, &args); logv(config, format!("executing {}", cmdline)); cmdline }; let procsrv::Result { out, err, status<|fim▁hole|> env, input).expect(&format!("failed to exec `{}`", prog)); dump_output(config, testfile, &out, &err); return ProcRes { status: status, stdout: out, stderr: err, cmdline: cmdline, }; } // Linux and mac don't require adjusting the library search path #[cfg(unix)] fn make_cmdline(_libpath: &str, prog: &str, args: &[String]) -> String { format!("{} {}", prog, args.connect(" ")) } #[cfg(windows)] fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String { // Build the LD_LIBRARY_PATH variable as it would be seen on the command line // for diagnostic purposes fn lib_path_cmd_prefix(path: &str) -> String { format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path)) } format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.connect(" ")) } fn dump_output(config: &Config, testfile: &Path, out: &str, err: &str) { dump_output_file(config, testfile, out, "out"); dump_output_file(config, testfile, err, "err"); maybe_dump_to_stdout(config, out, err); } fn dump_output_file(config: &Config, testfile: &Path, out: &str, extension: &str) { let outfile = make_out_name(config, testfile, extension); File::create(&outfile).write_all(out.as_bytes()).unwrap(); } fn make_out_name(config: &Config, testfile: &Path, extension: &str) -> Path { output_base_name(config, testfile).with_extension(extension) } fn aux_output_dir_name(config: &Config, testfile: &Path) -> Path { let f = output_base_name(config, testfile); let mut fname = f.filename().unwrap().to_vec(); 
fname.extend("libaux".bytes()); f.with_filename(fname) } fn output_testname(testfile: &Path) -> Path { Path::new(testfile.filestem().unwrap()) } fn output_base_name(config: &Config, testfile: &Path) -> Path { config.build_base .join(&output_testname(testfile)) .with_extension(&config.stage_id) } fn maybe_dump_to_stdout(config: &Config, out: &str, err: &str) { if config.verbose { println!("------{}------------------------------", "stdout"); println!("{}", out); println!("------{}------------------------------", "stderr"); println!("{}", err); println!("------------------------------------------"); } } fn error(err: &str) { println!("\nerror: {}", err); } fn fatal(err: &str) -> ! { error(err); panic!(); } fn fatal_proc_rec(err: &str, proc_res: &ProcRes) -> ! { print!("\n\ error: {}\n\ status: {}\n\ command: {}\n\ stdout:\n\ ------------------------------------------\n\ {}\n\ ------------------------------------------\n\ stderr:\n\ ------------------------------------------\n\ {}\n\ ------------------------------------------\n\ \n", err, proc_res.status, proc_res.cmdline, proc_res.stdout, proc_res.stderr); panic!(); } fn _arm_exec_compiled_test(config: &Config, props: &TestProps, testfile: &Path, env: Vec<(String, String)>) -> ProcRes { let args = make_run_args(config, props, testfile); let cmdline = make_cmdline("", &args.prog, &args.args); // get bare program string let mut tvec: Vec<String> = args.prog .split('/') .map(|ts| ts.to_string()) .collect(); let prog_short = tvec.pop().unwrap(); // copy to target let copy_result = procsrv::run("", &config.adb_path, None, &[ "push".to_string(), args.prog.clone(), config.adb_test_dir.clone() ], vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{}`", config.adb_path)); if config.verbose { println!("push ({}) {} {} {}", config.target, args.prog, copy_result.out, copy_result.err); } logv(config, format!("executing ({}) {}", config.target, cmdline)); let mut runargs = Vec::new(); // run test via adb_run_wrapper runargs.push("shell".to_string()); for (key, val) in env { runargs.push(format!("{}={}", key, val)); } runargs.push(format!("{}/../adb_run_wrapper.sh", config.adb_test_dir)); runargs.push(format!("{}", config.adb_test_dir)); runargs.push(format!("{}", prog_short)); for tv in &args.args { runargs.push(tv.to_string()); } procsrv::run("", &config.adb_path, None, &runargs, vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{}`", config.adb_path)); // get exitcode of result runargs = Vec::new(); runargs.push("shell".to_string()); runargs.push("cat".to_string()); runargs.push(format!("{}/{}.exitcode", config.adb_test_dir, prog_short)); let procsrv::Result{ out: exitcode_out, err: _, status: _ } = procsrv::run("", &config.adb_path, None, &runargs, vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{}`", config.adb_path)); let mut exitcode: int = 0; for c in exitcode_out.chars() { if !c.is_numeric() { break; } exitcode = exitcode * 10 + match c { '0' ... 
'9' => c as int - ('0' as int), _ => 101, } } // get stdout of result runargs = Vec::new(); runargs.push("shell".to_string()); runargs.push("cat".to_string()); runargs.push(format!("{}/{}.stdout", config.adb_test_dir, prog_short)); let procsrv::Result{ out: stdout_out, err: _, status: _ } = procsrv::run("", &config.adb_path, None, &runargs, vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{}`", config.adb_path)); // get stderr of result runargs = Vec::new(); runargs.push("shell".to_string()); runargs.push("cat".to_string()); runargs.push(format!("{}/{}.stderr", config.adb_test_dir, prog_short)); let procsrv::Result{ out: stderr_out, err: _, status: _ } = procsrv::run("", &config.adb_path, None, &runargs, vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{}`", config.adb_path)); dump_output(config, testfile, &stdout_out, &stderr_out); ProcRes { status: process::ProcessExit::ExitStatus(exitcode), stdout: stdout_out, stderr: stderr_out, cmdline: cmdline } } fn _arm_push_aux_shared_library(config: &Config, testfile: &Path) { let tdir = aux_output_dir_name(config, testfile); let dirs = fs::readdir(&tdir).unwrap(); for file in &dirs { if file.extension_str() == Some("so") { // FIXME (#9639): This needs to handle non-utf8 paths let copy_result = procsrv::run("", &config.adb_path, None, &[ "push".to_string(), file.as_str() .unwrap() .to_string(), config.adb_test_dir.to_string(), ], vec!(("".to_string(), "".to_string())), Some("".to_string())) .expect(&format!("failed to exec `{}`", config.adb_path)); if config.verbose { println!("push ({}) {:?} {} {}", config.target, file.display(), copy_result.out, copy_result.err); } } } } // codegen tests (vs. clang) fn append_suffix_to_stem(p: &Path, suffix: &str) -> Path { if suffix.len() == 0 { (*p).clone() } else { let mut stem = p.filestem().unwrap().to_vec(); stem.extend("-".bytes()); stem.extend(suffix.bytes()); p.with_filename(stem) } } fn compile_test_and_save_bitcode(config: &Config, props: &TestProps, testfile: &Path) -> ProcRes { let aux_dir = aux_output_dir_name(config, testfile); // FIXME (#9639): This needs to handle non-utf8 paths let mut link_args = vec!("-L".to_string(), aux_dir.as_str().unwrap().to_string()); let llvm_args = vec!("--emit=llvm-bc,obj".to_string(), "--crate-type=lib".to_string()); link_args.extend(llvm_args.into_iter()); let args = make_compile_args(config, props, link_args, |a, b| TargetLocation::ThisDirectory( output_base_name(a, b).dir_path()), testfile); compose_and_run_compiler(config, props, testfile, args, None) } fn compile_cc_with_clang_and_save_bitcode(config: &Config, _props: &TestProps, testfile: &Path) -> ProcRes { let bitcodefile = output_base_name(config, testfile).with_extension("bc"); let bitcodefile = append_suffix_to_stem(&bitcodefile, "clang"); let testcc = testfile.with_extension("cc"); let proc_args = ProcArgs { // FIXME (#9639): This needs to handle non-utf8 paths prog: config.clang_path.as_ref().unwrap().as_str().unwrap().to_string(), args: vec!("-c".to_string(), "-emit-llvm".to_string(), "-o".to_string(), bitcodefile.as_str().unwrap().to_string(), testcc.as_str().unwrap().to_string()) }; compose_and_run(config, testfile, proc_args, Vec::new(), "", None, None) } fn extract_function_from_bitcode(config: &Config, _props: &TestProps, fname: &str, testfile: &Path, suffix: &str) -> ProcRes { let bitcodefile = output_base_name(config, testfile).with_extension("bc"); let bitcodefile = append_suffix_to_stem(&bitcodefile, 
suffix); let extracted_bc = append_suffix_to_stem(&bitcodefile, "extract"); let prog = config.llvm_bin_path.as_ref().unwrap().join("llvm-extract"); let proc_args = ProcArgs { // FIXME (#9639): This needs to handle non-utf8 paths prog: prog.as_str().unwrap().to_string(), args: vec!(format!("-func={}", fname), format!("-o={}", extracted_bc.as_str().unwrap()), bitcodefile.as_str().unwrap().to_string()) }; compose_and_run(config, testfile, proc_args, Vec::new(), "", None, None) } fn disassemble_extract(config: &Config, _props: &TestProps, testfile: &Path, suffix: &str) -> ProcRes { let bitcodefile = output_base_name(config, testfile).with_extension("bc"); let bitcodefile = append_suffix_to_stem(&bitcodefile, suffix); let extracted_bc = append_suffix_to_stem(&bitcodefile, "extract"); let extracted_ll = extracted_bc.with_extension("ll"); let prog = config.llvm_bin_path.as_ref().unwrap().join("llvm-dis"); let proc_args = ProcArgs { // FIXME (#9639): This needs to handle non-utf8 paths prog: prog.as_str().unwrap().to_string(), args: vec!(format!("-o={}", extracted_ll.as_str().unwrap()), extracted_bc.as_str().unwrap().to_string()) }; compose_and_run(config, testfile, proc_args, Vec::new(), "", None, None) } fn count_extracted_lines(p: &Path) -> uint { let x = File::open(&p.with_extension("ll")).read_to_end().unwrap(); let x = str::from_utf8(&x).unwrap(); x.lines().count() } fn run_codegen_test(config: &Config, props: &TestProps, testfile: &Path, mm: &mut MetricMap) { if config.llvm_bin_path.is_none() { fatal("missing --llvm-bin-path"); } if config.clang_path.is_none() { fatal("missing --clang-path"); } let mut proc_res = compile_test_and_save_bitcode(config, props, testfile); if !proc_res.status.success() { fatal_proc_rec("compilation failed!", &proc_res); } proc_res = extract_function_from_bitcode(config, props, "test", testfile, ""); if !proc_res.status.success() { fatal_proc_rec("extracting 'test' function failed", &proc_res); } proc_res = disassemble_extract(config, props, testfile, ""); if !proc_res.status.success() { fatal_proc_rec("disassembling extract failed", &proc_res); } let mut proc_res = compile_cc_with_clang_and_save_bitcode(config, props, testfile); if !proc_res.status.success() { fatal_proc_rec("compilation failed!", &proc_res); } proc_res = extract_function_from_bitcode(config, props, "test", testfile, "clang"); if !proc_res.status.success() { fatal_proc_rec("extracting 'test' function failed", &proc_res); } proc_res = disassemble_extract(config, props, testfile, "clang"); if !proc_res.status.success() { fatal_proc_rec("disassembling extract failed", &proc_res); } let base = output_base_name(config, testfile); let base_extract = append_suffix_to_stem(&base, "extract"); let base_clang = append_suffix_to_stem(&base, "clang"); let base_clang_extract = append_suffix_to_stem(&base_clang, "extract"); let base_lines = count_extracted_lines(&base_extract); let clang_lines = count_extracted_lines(&base_clang_extract); mm.insert_metric("clang-codegen-ratio", (base_lines as f64) / (clang_lines as f64), 0.001); }<|fim▁end|>
} = procsrv::run(lib_path, &prog, aux_path, &args,
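# Note on the compiletest record above: check_expected_errors / is_compiler_error_or_warning
# scan rustc output for diagnostics shaped like "filename:line1:col1: line2:col2: error: msg"
# (see the comment block preceding check_expected_errors). The snippet below is only an
# illustrative Python re-statement of that prefix check; the regex and the function name are
# assumptions made for this sketch and do not appear in the original Rust source.
import re

_DIAGNOSTIC_PREFIX = re.compile(r'^[^:]+:\d+:\d+: \d+:\d+ (error|warning)')

def looks_like_compiler_diagnostic(line):
    # path, start line:col, end line:col, then the severity keyword
    return _DIAGNOSTIC_PREFIX.match(line) is not None

print(looks_like_compiler_diagnostic("foo.rs:3:1: 3:10 error: mismatched types"))  # True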
<|file_name|>vn_char.py<|end_file_name|><|fim▁begin|>import lxml.html as l import requests def key_char_parse(char_id): url = 'https://vndb.org/c' + str(char_id) page = requests.get(url) root = l.fromstring(page.text) name = root.cssselect('.mainbox h1')[0].text kanji_name = root.cssselect('.mainbox h2.alttitle')[0].text img = 'https:' + root.cssselect('.mainbox .charimg img')[0].attrib['src'] gender = root.cssselect('.chardetails table thead tr td abbr')[0].attrib['title'] try: bloodtype = root.cssselect('.chardetails table thead tr td span')[0].text except IndexError: bloodtype = None table = root.cssselect('.chardetails table')[0] for row in table: if row.tag == 'tr': if len(row) == 2: try:<|fim▁hole|> value = None try: if row[1][0].tag == 'a': value = row[1][0].text else: value = [] for span in row[1]: if 'charspoil_1' in span.classes: tag = 'minor spoiler' elif 'charspoil_2' in span.classes: tag = 'spoiler' elif 'sexual' in span.classes: tag = 'sexual trait' else: tag = None value.append({'value': span[1].text, 'tag': tag}) except IndexError: value = row[1].text if key == 'Visual novels': value = [] for span in row[1]: if span.tag == 'span': value.append(span.text + span[0].text) desc = root.cssselect('.chardetails table td.chardesc')[0][1].text character = { 'URL': url, 'Name': name, 'Name_J': kanji_name, 'Image': img, 'Gender': gender, 'Blood_Type': bloodtype, 'Description': desc } return character<|fim▁end|>
key = row[0][0].text except IndexError: key = row[0].text
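# Minimal usage sketch for key_char_parse() from the vn_char.py record above. It issues a
# live request to vndb.org; the character id used here is an arbitrary placeholder and the
# plain "vn_char" import path is assumed, so treat this purely as an illustration.
from vn_char import key_char_parse

info = key_char_parse(1)
print(info['Name'], info['Name_J'])
print(info['Gender'], info['Blood_Type'])
print(info['Image'])
print(info['Description'])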
<|file_name|>jsxmin_renaming.cpp<|end_file_name|><|fim▁begin|>#include "jsxmin_renaming.h" #include <assert.h> #include <stdio.h> #include <iostream> // Varaible renaming of JS files. // This file includes three renaming strategies: // 1. local variable renaming; // 2. global variable renaming in the current file scope; // 3. property renaming in the current file scope; // // Local variable renaming: // This is done in function level. The first pass collects all varabiles and // functions declared in the current scope (non-recursive), and choose a new // (shorter) name for local variables and functions. New names cannot be names // used in parent scopes (the global scope is root of all local scopes). // The second pass renames identifiers in the current function using // the mapping constructed in the local scope. Here is one example: // // // Starts from global scope // var a = 10; // function func(foo, bar) { // var gee = a; // } // First, it builds a gloabl scope with mappings // global_scope => {'a' -> 'a', 'func' -> 'func'} // When entering function 'func', a local scope is built: // func_scope => {'foo' -> 'foo', 'bar' -> 'bar', 'gee' -> 'gee'} // |__ parent -> {'a' -> 'a', 'func' -> 'func'} ( *global_scope* ) // // When renaming variables in func_scope, it starts with shortest name 'a', // but it first has to loopup the scope chain to see if 'a' is used as // new name in the current scope and parent scopes. In this case, 'a' is // not available, so 'b' is choosed as new name of 'foo'. The result // local scope looks like following: // func_scope => {'foo' -> 'b', 'bar' -> 'c', 'gee' => 'd'} // |__ parent -> {'a' -> 'a', 'func' -> 'func'} ( *global_scope* ) // // The next pass is to rename identifers in the code using scope info. // In this example, 'foo', 'bar', 'gee' are renamed to 'b', 'c', and 'd', // but the identifier 'a' in func is kept the same because the global scope // preserves its original name. // // When entering a function, a new scope is created with the current scope // as its parent scope. // // Global variable renaming and property renaming: // We use naming convention that name starting with exact one '_' is private // to the file or the class (function). Also this naming convention is // voilated in our code, but these are not common and are fixable. // // A tricky part of global variable and property renaming is that we don't // collect all property names and variables, so when chooseing new names, // the new name might be used already, but we don't know. // To solve this problem, we use naming convention here again. New names for // global variables and properties names always start with one exact '_'. // It works because we collect all names starting with exact one '_'. // // TODO : Property renaming in file scope is UNSAFE: // A construtor function can set a private property named _foo, it may call // another constructor function (as its parent class) that adds a property // named _bar. Because the child and parent constructor functions are in // different files, both can get renamed to the same name. using namespace std; using namespace fbjs; #define for_nodes(p, i) \ for (node_list_t::iterator i = (p)->childNodes().begin(); \ i != (p)->childNodes().end(); \ ++i) #define WARN(format, args...) 
// ---- NameFactory ----- string NameFactory::next() { string result = _prefix + _current; bool found = false; // move to the next for (size_t i = 0; i < _current.size(); i++) { char c = _current[i] + 1; if (c <= 'z') { _current[i] = c; found = true; break; } } if (!found) { _current.push_back('a'); } return result; } // ---- Scope ---- void Scope::declare(string name) { _replacement[name] = name; } string Scope::new_name(string orig_name) { rename_t::iterator it = _replacement.find(orig_name); if (it != _replacement.end()) { return it->second; } if (!_parent) { return orig_name; } return _parent->new_name(orig_name); } <|fim▁hole|> return true; } if (!_parent) { return false; } return _parent->declared(name); } bool Scope::in_use(string name) { if (_new_names.find(name) != _new_names.end()) { return true; } if (!_parent) { return false; } return _parent->in_use(name); } void Scope::dump() { int indention = 0; Scope* parent = _parent; while (parent != NULL) { indention += 2; parent = parent->_parent; } for (rename_t::iterator it = _replacement.begin(); it != _replacement.end(); it++) { cout<<"//"; for (int i = 0; i < indention; i++) { cout << " "; } cout << it->first.c_str() << " -> " << it->second.c_str() << "\n"; } } bool LocalScope::need_rename(const string& name) { return name != "event"; } void LocalScope::rename_vars() { NameFactory factory; for (rename_t::iterator it = _replacement.begin(); it != _replacement.end(); it++) { string var_name = it->first; string new_name = it->second; if (need_rename(var_name)) { new_name = factory.next(); while (_parent->in_use(new_name)) { new_name = factory.next(); } } rename_internal(var_name, new_name); } } // ---- GlobalScope ---- GlobalScope::GlobalScope(bool rename_private) : Scope(NULL) { this->_rename_private = rename_private; this->_name_factory.set_prefix("_"); } bool GlobalScope::need_rename(const string& name) { return this->_rename_private && name.length() > 1 && name[0] == '_' && name[1] != '_'; } void GlobalScope::rename_vars() { for (rename_t::iterator it = _replacement.begin(); it != _replacement.end(); it++) { string var_name = it->first; string new_name = it->second; if (need_rename(var_name)) { new_name = _name_factory.next(); while (this->in_use(new_name)) { new_name = _name_factory.next(); } } rename_internal(var_name, new_name); } } void GlobalScope::rename_var(const string& var_name) { string new_name = _name_factory.next(); while (this->in_use(new_name)) { new_name = _name_factory.next(); } rename_internal(var_name, new_name); } // ----- VariableRenaming ---- VariableRenaming::VariableRenaming() { this->_global_scope = new GlobalScope(/* rename_globals */ false); } VariableRenaming::~VariableRenaming() { delete this->_global_scope; } void VariableRenaming::process(NodeProgram* root) { // Collect all symbols in the file scope build_scope(root, this->_global_scope); this->_global_scope->rename_vars(); // Starts in the global scope. minify(root, this->_global_scope); } void VariableRenaming::minify(Node* node, Scope* scope) { if (node == NULL) { return; } if (typeid(*node) == typeid(NodeObjectLiteralProperty)) { // For {prop: value}, we can't rename the property with local scope rules. 
minify(node->childNodes().back(), scope); } else if (typeid(*node) == typeid(NodeStaticMemberExpression)) { // a.b case, cannot rename _b minify(node->childNodes().front(), scope); } else if (typeid(*node) == typeid(NodeIdentifier)) { NodeIdentifier* n = static_cast<NodeIdentifier*>(node); string name = n->name(); if (scope->declared(name)) { n->rename(scope->new_name(name)); } } else if ( (typeid(*node) == typeid(NodeFunctionDeclaration) || typeid(*node) == typeid(NodeFunctionExpression))) { if (!function_has_with_or_eval(node)){ node_list_t::iterator func = node->childNodes().begin(); // Skip function name. ++func; // Create a new local scope for the function using current scope // as parent. Then add arguments to the local scope and build // scope for variables declared in the function. LocalScope child_scope(scope); // First, add all the arguments to scope. for_nodes(*func, arg) { NodeIdentifier *arg_node = static_cast<NodeIdentifier*>(*arg); child_scope.declare(arg_node->name()); } // Now, look ahead and find all the local variable declarations. build_scope(*(++func), &child_scope); // Build renaming map in local scope child_scope.rename_vars(); // Finally, recurse with the new scope. // Function name can only be renamed in the parent scope. for_nodes(node, ii) { if (ii == node->childNodes().begin()) { minify(*ii, scope); } else { minify(*ii, &child_scope); } } } // If the function has with and eval, don't attempt to rename code further. } else { for_nodes(node, ii) { minify(*ii, scope); } } } // Iterate through all child nodes and find if it contains with or eval // statement, it also recursively check sub functions. bool VariableRenaming::function_has_with_or_eval(Node* node) { if (node == NULL) { return false; } for_nodes(node, ii) { Node* child = *ii; if (child == NULL) { continue; } if (typeid(*child) == typeid(NodeWith)) { WARN("function has 'with' statement at line %d\n", child->lineno()); return true; } NodeFunctionCall* call = dynamic_cast<NodeFunctionCall*>(child); if (call != NULL) { NodeIdentifier* iden = dynamic_cast<NodeIdentifier*>(call->childNodes().front()); if (iden != NULL && iden->name() == "eval") { WARN("function uses 'eval' at line %d\n", call->lineno()); return true; } } if ( (typeid(*child) == typeid(NodeFunctionDeclaration) || typeid(*child) == typeid(NodeFunctionExpression)) && function_has_with_or_eval(child) ) { return true; // Don't check the current child node again if it is a function // declaration or expression. } else if (function_has_with_or_eval(child)) { return true; } } return false; } void VariableRenaming::build_scope(Node *node, Scope* scope) { if (node == NULL) { return; } if (typeid(*node) == typeid(NodeFunctionExpression)) { return; } if (typeid(*node) == typeid(NodeFunctionDeclaration)) { NodeIdentifier* decl_name = dynamic_cast<NodeIdentifier*>(node->childNodes().front()); if (decl_name) { scope->declare(decl_name->name()); } return; } if (typeid(*node) == typeid(NodeVarDeclaration)) { for_nodes(node, ii) { NodeIdentifier *n = dynamic_cast<NodeIdentifier*>(*ii); if (!n) { n = dynamic_cast<NodeIdentifier*>((*ii)->childNodes().front()); } scope->declare(n->name()); } return; } // Special case for try ... catch(e) ... // Treat e as a local variable. if (typeid(*node) == typeid(NodeTry)) { // second child is the catch variable, either null of a node identifier. 
node_list_t::iterator it = node->childNodes().begin(); ++it; NodeIdentifier* var = dynamic_cast<NodeIdentifier*>(*it); if (var) { scope->declare(var->name()); } return; } // Special case for 'for (i in o)' and 'i = ...'. // In these cases, if 'i' is not declared before, we treat it as a global // variable. It is most likely the developer forgot to put a 'var' before // the variable name, and we give out a warning. if (typeid(*node) == typeid(NodeAssignment) || typeid(*node) == typeid(NodeForIn)) { NodeIdentifier* var = dynamic_cast<NodeIdentifier*>(node->childNodes().front()); if (var && !scope->declared(var->name())) { // 1. assignment to an undeclared variable is made in a local scope, or // 2. for-in loop variable is not declared. if (!scope->is_global() || typeid(*node) == typeid(NodeForIn)) { WARN("'%s' at line %d is not declared, 'var %s'?\n", var->name().c_str(), var->lineno(), var->name().c_str()); this->_global_scope->reserve(var->name()); } } // Fall through to process the rest part of statement. } for_nodes(node, ii) { build_scope(*ii, scope); } } // ----- PropertyRenaming ----- // Unsafe PropertyRenaming::PropertyRenaming() { this->_property_scope = new GlobalScope(true); } PropertyRenaming::~PropertyRenaming() { delete this->_property_scope; } void PropertyRenaming::process(NodeProgram* root) { // Rewrite nodes, this is necessary to make property renaming work correctly. // e.g., a['foo'] -> a.foo, and { 'foo' : 1 } -> { foo : 1 }. ReductionWalker walker; walker.walk(root); minify(root); } void PropertyRenaming::minify(Node* node) { if (node == NULL) { return; } if (typeid(*node) == typeid(NodeObjectLiteralProperty)) { // For {prop: value}, we can't rename the property with local scope rules. NodeIdentifier* n = dynamic_cast<NodeIdentifier*>(node->childNodes().front()); if (n && _property_scope->need_rename(n->name())) { string name = n->name(); if (!_property_scope->declared(name)) { _property_scope->declare(name); _property_scope->rename_var(name); } n->rename(_property_scope->new_name(name)); } minify(node->childNodes().back()); } else if (typeid(*node) == typeid(NodeStaticMemberExpression)) { // a._b. case, rename _b part minify(node->childNodes().front()); // Must be NodeIdentifier. NodeIdentifier* n = dynamic_cast<NodeIdentifier*>(node->childNodes().back()); assert(n != NULL); if (_property_scope->need_rename(n->name())) { string name = n->name(); if (!_property_scope->declared(name)) { _property_scope->declare(name); _property_scope->rename_var(name); } n->rename(_property_scope->new_name(name)); } } else { for_nodes(node, ii) { minify(*ii); } } }<|fim▁end|>
bool Scope::declared(string name) { if (_replacement.find(name) != _replacement.end()) {
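# The jsxmin_renaming.cpp record above explains its strategy in the header comment: each
# function gets a local scope chained to its parent, and a variable's new name is the
# shortest generated name not already in use anywhere up the scope chain. The Python model
# below only illustrates that idea -- the class and method names here are not the C++ ones,
# and it handles single-letter names only, which is enough for the worked example.
import string

class ScopeModel(object):
    def __init__(self, parent=None):
        self.parent = parent
        self.renames = {}          # original name -> new name

    def in_use(self, name):
        if name in self.renames.values():
            return True
        return self.parent.in_use(name) if self.parent else False

    def new_name(self, name):
        if name in self.renames:
            return self.renames[name]
        return self.parent.new_name(name) if self.parent else name

    def rename_vars(self, names):
        candidates = iter(string.ascii_lowercase)   # 'a', 'b', 'c', ...
        for original in names:
            short = next(candidates)
            while self.in_use(short):
                short = next(candidates)
            self.renames[original] = short

# Mirrors the example in the C++ header comment: 'a' is taken in the global scope,
# so the function's arguments get 'b', 'c' and the local variable gets 'd'.
global_scope = ScopeModel()
global_scope.renames = {'a': 'a', 'func': 'func'}
func_scope = ScopeModel(global_scope)
func_scope.rename_vars(['foo', 'bar', 'gee'])
print(func_scope.renames)        # {'foo': 'b', 'bar': 'c', 'gee': 'd'}
print(func_scope.new_name('a'))  # 'a' -- falls through to the global scope unchanged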
<|file_name|>http.py<|end_file_name|><|fim▁begin|># encoding: utf-8 # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. # # Author: Kyle Lahnakoski ([email protected]) # # MIMICS THE requests API (http://docs.python-requests.org/en/latest/) # DEMANDS data IS A JSON-SERIALIZABLE STRUCTURE # WITH ADDED default_headers THAT CAN BE SET USING mo_logs.settings # EG # {"debug.constants":{ # "pyLibrary.env.http.default_headers":{"From":"[email protected]"} # }} from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from copy import copy from mmap import mmap from numbers import Number from tempfile import TemporaryFile from requests import sessions, Response import mo_json from pyLibrary import convert from mo_logs.exceptions import Except from mo_logs import Log from mo_dots import Data, coalesce, wrap, set_default, unwrap from pyLibrary.env.big_data import safe_size, ibytes2ilines, icompressed2ibytes from mo_math import Math from jx_python import jx from mo_threads import Thread, Lock from mo_threads import Till from mo_times.durations import Duration DEBUG = False FILE_SIZE_LIMIT = 100 * 1024 * 1024 MIN_READ_SIZE = 8 * 1024 ZIP_REQUEST = False default_headers = Data() # TODO: MAKE THIS VARIABLE A SPECIAL TYPE OF EXPECTED MODULE PARAMETER SO IT COMPLAINS IF NOT SET default_timeout = 600 _warning_sent = False def request(method, url, zip=None, retry=None, **kwargs): """ JUST LIKE requests.request() BUT WITH DEFAULT HEADERS AND FIXES DEMANDS data IS ONE OF: * A JSON-SERIALIZABLE STRUCTURE, OR * LIST OF JSON-SERIALIZABLE STRUCTURES, OR * None Parameters * zip - ZIP THE REQUEST BODY, IF BIG ENOUGH * json - JSON-SERIALIZABLE STRUCTURE * retry - {"times": x, "sleep": y} STRUCTURE THE BYTE_STRINGS (b"") ARE NECESSARY TO PREVENT httplib.py FROM **FREAKING OUT** IT APPEARS requests AND httplib.py SIMPLY CONCATENATE STRINGS BLINDLY, WHICH INCLUDES url AND headers """ global _warning_sent if not default_headers and not _warning_sent: _warning_sent = True Log.warning( "The pyLibrary.env.http module was meant to add extra " "default headers to all requests, specifically the 'Referer' " "header with a URL to the project. 
Use the `pyLibrary.debug.constants.set()` "<|fim▁hole|> "function to set `pyLibrary.env.http.default_headers`" ) if isinstance(url, list): # TRY MANY URLS failures = [] for remaining, u in jx.countdown(url): try: response = request(method, u, zip=zip, retry=retry, **kwargs) if Math.round(response.status_code, decimal=-2) not in [400, 500]: return response if not remaining: return response except Exception as e: e = Except.wrap(e) failures.append(e) Log.error("Tried {{num}} urls", num=len(url), cause=failures) if b"session" in kwargs: session = kwargs[b"session"] del kwargs[b"session"] else: session = sessions.Session() session.headers.update(default_headers) if zip is None: zip = ZIP_REQUEST if isinstance(url, unicode): # httplib.py WILL **FREAK OUT** IF IT SEES ANY UNICODE url = url.encode("ascii") _to_ascii_dict(kwargs) timeout = kwargs[b'timeout'] = coalesce(kwargs.get(b'timeout'), default_timeout) if retry == None: retry = Data(times=1, sleep=0) elif isinstance(retry, Number): retry = Data(times=retry, sleep=1) else: retry = wrap(retry) if isinstance(retry.sleep, Duration): retry.sleep = retry.sleep.seconds set_default(retry, {"times": 1, "sleep": 0}) if b'json' in kwargs: kwargs[b'data'] = convert.value2json(kwargs[b'json']).encode("utf8") del kwargs[b'json'] try: headers = kwargs[b"headers"] = unwrap(coalesce(wrap(kwargs)[b"headers"], {})) set_default(headers, {b"accept-encoding": b"compress, gzip"}) if zip and len(coalesce(kwargs.get(b"data"))) > 1000: compressed = convert.bytes2zip(kwargs[b"data"]) headers[b'content-encoding'] = b'gzip' kwargs[b"data"] = compressed _to_ascii_dict(headers) else: _to_ascii_dict(headers) except Exception as e: Log.error("Request setup failure on {{url}}", url=url, cause=e) errors = [] for r in range(retry.times): if r: Till(seconds=retry.sleep).wait() try: if DEBUG: Log.note("http {{method}} to {{url}}", method=method, url=url) return session.request(method=method, url=url, **kwargs) except Exception as e: errors.append(Except.wrap(e)) if " Read timed out." 
in errors[0]: Log.error("Tried {{times}} times: Timeout failure (timeout was {{timeout}}", timeout=timeout, times=retry.times, cause=errors[0]) else: Log.error("Tried {{times}} times: Request failure of {{url}}", url=url, times=retry.times, cause=errors[0]) def _to_ascii_dict(headers): if headers is None: return for k, v in copy(headers).items(): if isinstance(k, unicode): del headers[k] if isinstance(v, unicode): headers[k.encode("ascii")] = v.encode("ascii") else: headers[k.encode("ascii")] = v elif isinstance(v, unicode): headers[k] = v.encode("ascii") def get(url, **kwargs): kwargs.setdefault(b'allow_redirects', True) kwargs[b"stream"] = True return HttpResponse(request(b'get', url, **kwargs)) def get_json(url, **kwargs): """ ASSUME RESPONSE IN IN JSON """ response = get(url, **kwargs) c = response.all_content return mo_json.json2value(convert.utf82unicode(c)) def options(url, **kwargs): kwargs.setdefault(b'allow_redirects', True) kwargs[b"stream"] = True return HttpResponse(request(b'options', url, **kwargs)) def head(url, **kwargs): kwargs.setdefault(b'allow_redirects', False) kwargs[b"stream"] = True return HttpResponse(request(b'head', url, **kwargs)) def post(url, **kwargs): kwargs[b"stream"] = True return HttpResponse(request(b'post', url, **kwargs)) def delete(url, **kwargs): return HttpResponse(request(b'delete', url, **kwargs)) def post_json(url, **kwargs): """ ASSUME RESPONSE IN IN JSON """ if b"json" in kwargs: kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"json"])) elif b'data' in kwargs: kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"data"])) else: Log.error("Expecting `json` parameter") response = post(url, **kwargs) c = response.content try: details = mo_json.json2value(convert.utf82unicode(c)) except Exception as e: Log.error("Unexpected return value {{content}}", content=c, cause=e) if response.status_code not in [200, 201]: Log.error("Bad response", cause=Except.wrap(details)) return details def put(url, **kwargs): return HttpResponse(request(b'put', url, **kwargs)) def patch(url, **kwargs): kwargs[b"stream"] = True return HttpResponse(request(b'patch', url, **kwargs)) def delete(url, **kwargs): kwargs[b"stream"] = True return HttpResponse(request(b'delete', url, **kwargs)) class HttpResponse(Response): def __new__(cls, resp): resp.__class__ = HttpResponse return resp def __init__(self, resp): pass self._cached_content = None @property def all_content(self): # response.content WILL LEAK MEMORY (?BECAUSE OF PYPY"S POOR HANDLING OF GENERATORS?) 
# THE TIGHT, SIMPLE, LOOP TO FILL blocks PREVENTS THAT LEAK if self._content is not False: self._cached_content = self._content elif self._cached_content is None: def read(size): if self.raw._fp.fp is not None: return self.raw.read(amt=size, decode_content=True) else: self.close() return None self._cached_content = safe_size(Data(read=read)) if hasattr(self._cached_content, "read"): self._cached_content.seek(0) return self._cached_content @property def all_lines(self): return self.get_all_lines() def get_all_lines(self, encoding="utf8", flexible=False): try: iterator = self.raw.stream(4096, decode_content=False) if self.headers.get('content-encoding') == 'gzip': return ibytes2ilines(icompressed2ibytes(iterator), encoding=encoding, flexible=flexible) elif self.headers.get('content-type') == 'application/zip': return ibytes2ilines(icompressed2ibytes(iterator), encoding=encoding, flexible=flexible) elif self.url.endswith(".gz"): return ibytes2ilines(icompressed2ibytes(iterator), encoding=encoding, flexible=flexible) else: return ibytes2ilines(iterator, encoding=encoding, flexible=flexible, closer=self.close) except Exception as e: Log.error("Can not read content", cause=e) class Generator_usingStream(object): """ A BYTE GENERATOR USING A STREAM, AND BUFFERING IT FOR RE-PLAY """ def __init__(self, stream, length, _shared=None): """ :param stream: THE STREAM WE WILL GET THE BYTES FROM :param length: THE MAX NUMBER OF BYTES WE ARE EXPECTING :param _shared: FOR INTERNAL USE TO SHARE THE BUFFER :return: """ self.position = 0 file_ = TemporaryFile() if not _shared: self.shared = Data( length=length, locker=Lock(), stream=stream, done_read=0, file=file_, buffer=mmap(file_.fileno(), length) ) else: self.shared = _shared self.shared.ref_count += 1 def __iter__(self): return Generator_usingStream(None, self.shared.length, self.shared) def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def next(self): if self.position >= self.shared.length: raise StopIteration end = min(self.position + MIN_READ_SIZE, self.shared.length) s = self.shared with s.locker: while end > s.done_read: data = s.stream.read(MIN_READ_SIZE) s.buffer.write(data) s.done_read += MIN_READ_SIZE if s.done_read >= s.length: s.done_read = s.length s.stream.close() try: return s.buffer[self.position:end] finally: self.position = end def close(self): with self.shared.locker: if self.shared: s, self.shared = self.shared, None s.ref_count -= 1 if s.ref_count==0: try: s.stream.close() except Exception: pass try: s.buffer.close() except Exception: pass try: s.file.close() except Exception: pass def __del__(self): self.close()<|fim▁end|>
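# Short usage sketch for the pyLibrary.env.http wrapper defined above. The URLs and payload
# are placeholders, and importing it as pyLibrary.env.http assumes the package layout named
# in the module's own comments is on the path -- a sketch, not a verified invocation.
from pyLibrary.env import http

# GET with the module's default headers, gzip handling and retry logic applied
items = http.get_json("https://example.com/api/items", retry={"times": 3, "sleep": 2})

# POST a JSON-serializable structure; post_json() requires a `json` (or `data`) argument
result = http.post_json("https://example.com/api/items", json={"name": "widget"})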
<|file_name|>regions-close-over-type-parameter-multiple.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(box_syntax)] // Various tests where we over type parameters with multiple lifetime // bounds. trait SomeTrait { fn get(&self) -> isize; } fn make_object_good1<'a,'b,A:SomeTrait+'a+'b>(v: A) -> Box<SomeTrait+'a> { // A outlives 'a AND 'b... box v as Box<SomeTrait+'a> // ...hence this type is safe. } fn make_object_good2<'a,'b,A:SomeTrait+'a+'b>(v: A) -> Box<SomeTrait+'b> { // A outlives 'a AND 'b... box v as Box<SomeTrait+'b> // ...hence this type is safe. } fn make_object_bad<'a,'b,'c,A:SomeTrait+'a+'b>(v: A) -> Box<SomeTrait+'c> { // A outlives 'a AND 'b...but not 'c. box v as Box<SomeTrait+'a> //~ ERROR cannot infer an appropriate lifetime<|fim▁hole|> fn main() { }<|fim▁end|>
}
<|file_name|>proguard.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import optparse import os import sys from util import build_utils from util import proguard_util _DANGEROUS_OPTIMIZATIONS = [ "class/unboxing/enum", # See crbug.com/625992 "code/allocation/variable", # See crbug.com/625994 "field/propagation/value", "method/propagation/parameter", "method/propagation/returnvalue", ] def _ParseOptions(args): parser = optparse.OptionParser() build_utils.AddDepfileOption(parser) parser.add_option('--proguard-path', help='Path to the proguard executable.') parser.add_option('--input-paths', help='Paths to the .jar files proguard should run on.') parser.add_option('--output-path', help='Path to the generated .jar file.') parser.add_option('--proguard-configs', action='append', help='Paths to proguard configuration files.') parser.add_option('--proguard-config-exclusions',<|fim▁hole|> help='GN list of paths to proguard configuration files ' 'included by --proguard-configs, but that should ' 'not actually be included.') parser.add_option('--mapping', help='Path to proguard mapping to apply.') parser.add_option('--is-test', action='store_true', help='If true, extra proguard options for instrumentation tests will be ' 'added.') parser.add_option('--classpath', action='append', help='Classpath for proguard.') parser.add_option('--stamp', help='Path to touch on success.') parser.add_option('--enable-dangerous-optimizations', action='store_true', help='Enable optimizations which are known to have issues.') parser.add_option('--verbose', '-v', action='store_true', help='Print all proguard output') options, _ = parser.parse_args(args) classpath = [] for arg in options.classpath: classpath += build_utils.ParseGnList(arg) options.classpath = classpath configs = [] for arg in options.proguard_configs: configs += build_utils.ParseGnList(arg) options.proguard_configs = configs options.proguard_config_exclusions = ( build_utils.ParseGnList(options.proguard_config_exclusions)) options.input_paths = build_utils.ParseGnList(options.input_paths) return options def main(args): args = build_utils.ExpandFileArgs(args) options = _ParseOptions(args) proguard = proguard_util.ProguardCmdBuilder(options.proguard_path) proguard.injars(options.input_paths) proguard.configs(options.proguard_configs) proguard.config_exclusions(options.proguard_config_exclusions) proguard.outjar(options.output_path) if options.mapping: proguard.mapping(options.mapping) classpath = list(set(options.classpath)) proguard.libraryjars(classpath) proguard.verbose(options.verbose) if not options.enable_dangerous_optimizations: proguard.disable_optimizations(_DANGEROUS_OPTIMIZATIONS) build_utils.CallAndWriteDepfileIfStale( proguard.CheckOutput, options, input_paths=proguard.GetInputs(), input_strings=proguard.build(), output_paths=proguard.GetOutputs(), depfile_deps=proguard.GetDepfileDeps()) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))<|fim▁end|>
default='',
<|file_name|>CollisionCheckStack.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2005, 2006, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.sun.xml.internal.bind.v2.util; import java.util.AbstractList; import java.util.Arrays; import java.util.List; import java.util.Stack; /** * {@link Stack}-like data structure that allows the following efficient operations: * * <ol> * <li>Push/pop operation. * <li>Duplicate check. When an object that's already in the stack is pushed, * this class will tell you so. * </ol> * * <p> * Object equality is their identity equality. * * <p> * This class implements {@link List} for accessing items in the stack, * but {@link List} methods that alter the stack is not supported. * * @author Kohsuke Kawaguchi */ public final class CollisionCheckStack<E> extends AbstractList<E> { private Object[] data; private int[] next; private int size = 0; /** * True if the check shall be done by using the object identity. * False if the check shall be done with the equals method. */ private boolean useIdentity = true; // for our purpose, there isn't much point in resizing this as we don't expect // the stack to grow that much. private final int[] initialHash; public CollisionCheckStack() { initialHash = new int[17]; data = new Object[16]; next = new int[16]; } /** * Set to false to use {@link Object#equals(Object)} to detect cycles. * This method can be only used when the stack is empty. */ public void setUseIdentity(boolean useIdentity) { this.useIdentity = useIdentity; } public boolean getUseIdentity() { return useIdentity; } /** * Pushes a new object to the stack. * * @return * true if this object has already been pushed */ public boolean push(E o) { if(data.length==size) expandCapacity(); data[size] = o; int hash = hash(o); boolean r = findDuplicate(o, hash); next[size] = initialHash[hash]; initialHash[hash] = size+1; size++; return r; } /** * Pushes a new object to the stack without making it participate * with the collision check. 
*/ public void pushNocheck(E o) { if(data.length==size) expandCapacity(); data[size] = o; next[size] = -1; size++; } @Override public E get(int index) { return (E)data[index]; } @Override public int size() { return size; } private int hash(Object o) { return ((useIdentity?System.identityHashCode(o):o.hashCode())&0x7FFFFFFF) % initialHash.length; } /** * Pops an object from the stack */ public E pop() { size--; Object o = data[size]; data[size] = null; // keeping references too long == memory leak int n = next[size]; if(n<0) { // pushed by nocheck. no need to update hash } else { int hash = hash(o); assert initialHash[hash]==size+1; initialHash[hash] = n; } return (E)o; } /** * Returns the top of the stack. */ public E peek() { return (E)data[size-1]; } private boolean findDuplicate(E o, int hash) { int p = initialHash[hash]; while(p!=0) { p--; Object existing = data[p]; if (useIdentity) { if(existing==o) return true; } else { if (o.equals(existing)) return true;<|fim▁hole|> } p = next[p]; } return false; } private void expandCapacity() { int oldSize = data.length; int newSize = oldSize * 2; Object[] d = new Object[newSize]; int[] n = new int[newSize]; System.arraycopy(data,0,d,0,oldSize); System.arraycopy(next,0,n,0,oldSize); data = d; next = n; } /** * Clears all the contents in the stack. */ public void reset() { if(size>0) { size = 0; Arrays.fill(initialHash,0); } } /** * String that represents the cycle. */ public String getCycleString() { StringBuilder sb = new StringBuilder(); int i=size()-1; E obj = get(i); sb.append(obj); Object x; do { sb.append(" -> "); x = get(--i); sb.append(x); } while(obj!=x); return sb.toString(); } }<|fim▁end|>
<|file_name|>most_calls.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3

import collections
import os
import sys

<|fim▁hole|>from librarytrader.librarystore import LibraryStore

s = LibraryStore()
s.load(sys.argv[1])

n = 20
if len(sys.argv) > 2:
    n = int(sys.argv[2])

outgoing_calls = set()
incoming_calls = collections.defaultdict(int)

for l in s.get_library_objects():
    for f, names in l.exported_addrs.items():
        s = 0
        name = '{}:{}'.format(l.fullname, names[0])
        s += len(l.internal_calls.get(f, []))
        s += len(l.external_calls.get(f, []))
        s += len(l.local_calls.get(f, []))
        outgoing_calls.add((name, s))
    for f, names in l.local_functions.items():
        s = 0
        name = '{}:LOCAL_{}'.format(l.fullname, names[0])
        s += len(l.internal_calls.get(f, []))
        s += len(l.external_calls.get(f, []))
        s += len(l.local_calls.get(f, []))
        outgoing_calls.add((name, s))
    for source, targets in l.internal_calls.items():
        for target in targets:
            key = '{}:{}'.format(l.fullname, l.exported_addrs[target][0])
            incoming_calls[key] += 1
    for source, targets in l.local_calls.items():
        for target in targets:
            key = '{}:LOCAL_{}'.format(l.fullname, l.local_functions[target][0])
            incoming_calls[key] += 1

out_sorted = sorted(outgoing_calls, key=lambda x: x[1])
print('Top {} outgoing calls'.format(n))
for tp in out_sorted[-n:]:
    print(tp[0], tp[1])

print('')
print('Top {} incoming calls (direct)'.format(n))
in_sorted = sorted(incoming_calls.items(), key=lambda x:x[1])
for tp in in_sorted[-n:]:
    print(tp[0], tp[1])<|fim▁end|>
<|file_name|>factorssvr.go<|end_file_name|><|fim▁begin|>package main

import (
    "fmt"
    "net/http"
    "log"
    "strconv"
    "github.com/stvndall/languagetechstats/src/go/services/factors"
    "github.com/gorilla/mux"
)

func main(){
    router := mux.NewRouter().StrictSlash(true)
    router.HandleFunc("/{numbers}", factorise)
    log.Fatal(http.ListenAndServe(":2500", router))
}

<|fim▁hole|>    if(err != nil){
        fmt.Fprintf(w,"%v",err)
        return
    }
    fmt.Fprintf(w, "%v",factors.PrimeFactors(number) )
}<|fim▁end|>
func factorise(w http.ResponseWriter, r *http.Request) {
    number, err := strconv.Atoi(mux.Vars(r)["numbers"])
<|file_name|>staticfiles.js<|end_file_name|><|fim▁begin|>var Path = require('path');
var Hapi = require('hapi');

var server = new Hapi.Server();
var port = process.env.PORT || 5000;<|fim▁hole|>server.views({
    engines: {
        html: require('handlebars')
    },
    path: Path.join(__dirname, 'views')
});

server.route([
    {
        path: '/',
        method: 'GET',
        config: {
            auth: false,
            handler: function(request, reply) {
                reply.view("index");
            }
        }
    },
    {
        method: 'GET',
        path: '/public/{param*}',
        handler: {
            directory: {
                path: Path.normalize(__dirname + '/public')
            }
        }
    }
]);

server.start(function(){
    console.log('Static Server Listening on : http://127.0.0.1:' +port);
});

module.exports = server;<|fim▁end|>
server.connection({ port: port });
<|file_name|>structdpctl__params.js<|end_file_name|><|fim▁begin|>var structdpctl__params =<|fim▁hole|>
    [ "aux", "structdpctl__params.html#a875761aaf475439ac2f51f564f8a558d", null ],
    [ "is_appctl", "structdpctl__params.html#a4d1857bb1afb99d03f1586a81c415b17", null ],
    [ "may_create", "structdpctl__params.html#aad17dd95f7fe3ebcceb3a0745e84b372", null ],
    [ "output", "structdpctl__params.html#a9dce609f19744eec92e846c45eef8b5a", null ],
    [ "print_statistics", "structdpctl__params.html#ab7e264c4a84c35d004e4e5df863438a7", null ],
    [ "usage", "structdpctl__params.html#a4d369f75cf9f886b57ead7cf1aabd1cf", null ],
    [ "verbosity", "structdpctl__params.html#a08a8be187581be2eb16d38bf0ede8f57", null ],
    [ "zero_statistics", "structdpctl__params.html#ae830308818ce5edb0dcb5a6a70f48eb8", null ]
];<|fim▁end|>
[
<|file_name|>bless.rs<|end_file_name|><|fim▁begin|>//! `bless` updates the reference files in the repo with changed output files //! from the last test run. use std::ffi::OsStr; use std::fs; use std::lazy::SyncLazy; use std::path::{Path, PathBuf}; use walkdir::WalkDir; use crate::clippy_project_root; static CLIPPY_BUILD_TIME: SyncLazy<Option<std::time::SystemTime>> = SyncLazy::new(|| { let mut path = std::env::current_exe().unwrap(); path.set_file_name("cargo-clippy"); fs::metadata(path).ok()?.modified().ok() }); /// # Panics /// /// Panics if the path to a test file is broken pub fn bless(ignore_timestamp: bool) { let test_suite_dirs = [ clippy_project_root().join("tests").join("ui"), clippy_project_root().join("tests").join("ui-internal"), clippy_project_root().join("tests").join("ui-toml"), clippy_project_root().join("tests").join("ui-cargo"), ]; for test_suite_dir in &test_suite_dirs { WalkDir::new(test_suite_dir) .into_iter() .filter_map(Result::ok) .filter(|f| f.path().extension() == Some(OsStr::new("rs"))) .for_each(|f| { let test_name = f.path().strip_prefix(test_suite_dir).unwrap(); for &ext in &["stdout", "stderr", "fixed"] { let test_name_ext = format!("stage-id.{}", ext); update_reference_file( f.path().with_extension(ext), test_name.with_extension(test_name_ext), ignore_timestamp, ); } }); } } fn update_reference_file(reference_file_path: PathBuf, test_name: PathBuf, ignore_timestamp: bool) { let test_output_path = build_dir().join(test_name); let relative_reference_file_path = reference_file_path.strip_prefix(clippy_project_root()).unwrap(); // If compiletest did not write any changes during the test run, // we don't have to update anything if !test_output_path.exists() { return; } // If the test output was not updated since the last clippy build, it may be outdated if !ignore_timestamp && !updated_since_clippy_build(&test_output_path).unwrap_or(true) { return; } let test_output_file = fs::read(&test_output_path).expect("Unable to read test output file"); let reference_file = fs::read(&reference_file_path).unwrap_or_default(); if test_output_file != reference_file { // If a test run caused an output file to change, update the reference file println!("updating {}", &relative_reference_file_path.display()); fs::copy(test_output_path, &reference_file_path).expect("Could not update reference file"); // We need to re-read the file now because it was potentially updated from copying let reference_file = fs::read(&reference_file_path).unwrap_or_default(); <|fim▁hole|> // If we copied over an empty output file, we remove the now empty reference file println!("removing {}", &relative_reference_file_path.display()); fs::remove_file(reference_file_path).expect("Could not remove reference file"); } } } fn updated_since_clippy_build(path: &Path) -> Option<bool> { let clippy_build_time = (*CLIPPY_BUILD_TIME)?; let modified = fs::metadata(path).ok()?.modified().ok()?; Some(modified >= clippy_build_time) } fn build_dir() -> PathBuf { let mut path = std::env::current_exe().unwrap(); path.set_file_name("test"); path }<|fim▁end|>
if reference_file.is_empty() {
<|file_name|>node_addr.py<|end_file_name|><|fim▁begin|>import squeakspace.common.util as ut import squeakspace.common.util_http as ht import squeakspace.proxy.server.db_sqlite3 as db import squeakspace.common.squeak_ex as ex import config def post_handler(environ): query = ht.parse_post_request(environ) cookies = ht.parse_cookies(environ) user_id = ht.get_required_cookie(cookies, 'user_id') session_id = ht.get_required_cookie(cookies, 'session_id') node_name = ht.get_required(query, 'node_name') url = ht.get_required(query, 'url') real_node_name = ht.get_required(query, 'real_node_name') fingerprint = ht.get_optional(query, 'fingerprint') conn = db.connect(config.db_path) try: c = db.cursor(conn) db.set_node_addr(c, user_id, session_id, node_name, url, real_node_name, fingerprint) db.commit(conn) raise ht.ok_json({'status' : 'ok'}) except ex.SqueakException as e: raise ht.convert_squeak_exception(e) finally: db.close(conn) def get_handler(environ): query = ht.parse_get_request(environ) cookies = ht.parse_cookies(environ) user_id = ht.get_required_cookie(cookies, 'user_id') session_id = ht.get_required_cookie(cookies, 'session_id') node_name = ht.get_required(query, 'node_name') conn = db.connect(config.db_path) try: c = db.cursor(conn) addr = db.read_node_addr(c, user_id, session_id, node_name) raise ht.ok_json({'status' : 'ok', 'addr' : addr}) except ex.SqueakException as e: raise ht.convert_squeak_exception(e) finally: db.close(conn) def delete_handler(environ): query = ht.parse_post_request(environ) cookies = ht.parse_cookies(environ) user_id = ht.get_required_cookie(cookies, 'user_id') session_id = ht.get_required_cookie(cookies, 'session_id') node_name = ht.get_required(query, 'node_name') conn = db.connect(config.db_path) try: c = db.cursor(conn) db.delete_node_addr(c, user_id, session_id, node_name) db.commit(conn) raise ht.ok_json({'status' : 'ok'}) except ex.SqueakException as e: raise ht.convert_squeak_exception(e)<|fim▁hole|> db.close(conn) def main_handler(environ): ht.dispatch_on_method(environ, { 'POST' : post_handler, 'GET' : get_handler, 'DELETE' : delete_handler}) def application(environ, start_response): return ht.respond_with_handler(environ, start_response, main_handler)<|fim▁end|>
finally:
<|file_name|>test_func_transmit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python ## ## Copyright 2008, Various ## Adrian Likins <[email protected]> ## ## This software may be freely redistributed under the terms of the GNU ## general public license.<|fim▁hole|>## import os import socket import subprocess import time import unittest import simplejson import func.utils from func import yaml from func import jobthing def structToYaml(data): # takes a data structure, serializes it to # yaml buf = yaml.dump(data) return buf def structToJSON(data): #Take data structure for the test #and serializes it using json serialized = simplejson.dumps(input) return serialized class BaseTest(object): # assume we are talking to localhost # th = socket.gethostname() th = socket.getfqdn() nforks=1 async=False ft_cmd = "func-transmit" # just so we can change it easy later def _serialize(self, data): raise NotImplementedError def _deserialize(self, buf): raise NotImplementedError def _call_async(self, data): data['async'] = True data['nforks'] = 4 job_id = self._call(data) no_answer = True while (no_answer): out = self._call({'clients': '*', 'method':'job_status', 'parameters': job_id}) if out[0] == jobthing.JOB_ID_FINISHED: no_answer = False else: time.sleep(.25) result = out[1] return result def _call(self, data): f = self._serialize(data) p = subprocess.Popen(self.ft_cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE) output = p.communicate(input=f) return self._deserialize(output[0]) def call(self, data): if self.async: return self._call_async(data) return self._call(data) def __init__(self): pass # we do this all over the place... def assert_on_fault(self, result): assert func.utils.is_error(result[self.th]) == False # assert type(result[self.th]) != xmlrpclib.Fault class YamlBaseTest(BaseTest): # i'd add the "yaml" attr here for nosetest to find, but it doesnt # seem to find it unless the class is a test class directly ft_cmd = "func-transmit --yaml" def _serialize(self, data): buf = yaml.dump(data) return buf def _deserialize(self, buf): data = yaml.load(buf).next() return data class JSONBaseTest(BaseTest): ft_cmd = "func-transmit --json" def _serialize(self, data): buf = simplejson.dumps(data) return buf def _deserialize(self, buf): data = simplejson.loads(buf) return data class ListMinion(object): def test_list_minions(self): out = self.call({'clients': '*', 'method': 'list_minions'}) def test_list_minions_no_match(self): out = self.call({'clients': 'somerandom-name-that-shouldnt-be-a_real_host_name', 'method': 'list_minions'}) assert out == [] def test_list_minions_group_name(self): out = self.call({'clients': '@test', 'method': 'list_minions'}) def test_list_minions_no_clients(self): out = self.call({'method': 'list_minions'}) class ListMinionAsync(ListMinion): async = True class TestListMinionYaml(YamlBaseTest, ListMinion): yaml = True def __init__(self): super(TestListMinionYaml, self).__init__() class TestListMinionJSON(JSONBaseTest, ListMinion): json = True def __init__(self): super(TestListMinionJSON, self).__init__() # list_minions is a convience call for func_transmit, and doesn't # really make any sense to call async #class TestListMinionYamlAsync(YamlBaseTest, ListMinionAsync): # yaml = True # async = True # def __init__(self): # super(TestListMinionYamlAsync, self).__init__() #class TestListMinionJSONAsync(JSONBaseTest, ListMinionAsync): # json = True # async = True # def __init__(self): # super(TestListMinionJSONAsync, self).__init__() class ClientGlob(object): def 
_test_add(self, client): result = self.call({'clients': client, 'method': 'add', 'module': 'test', 'parameters': [1,2]}) self.assert_on_fault(result) return result def test_single_client(self): result = self._test_add(self.th) def test_glob_client(self): result = self._test_add("*") def test_glob_list(self): result = self._test_add([self.th, self.th]) def test_glob_string_list(self): result = self._test_add("%s;*" % self.th) # note, needs a /etc/func/group setup with the proper groups defined # need to figure out a good way to test this... -akl def test_group(self): result = self._test_add("@test") # def test_group_and_glob(self): # result = self._test_add("@test;*") # def test_list_of_groups(self): # result = self._test_add(["@test", "@test2"]) # def test_string_list_of_groups(self): # result = self._test_add("@test;@test2") # run all the same tests, but run then class ClientGlobAsync(ClientGlob): async = True class TestClientGlobYaml(YamlBaseTest, ClientGlob): yaml = True def __init__(self): super(TestClientGlobYaml, self).__init__() class TestClientGlobJSON(JSONBaseTest, ClientGlob): json = True def __init__(self): super(TestClientGlobJSON, self).__init__() class TestClientGlobYamlAsync(YamlBaseTest, ClientGlobAsync): yaml = True async = True def __init__(self): super(TestClientGlobYamlAsync, self).__init__() class TestClientGlobJSONAsync(JSONBaseTest, ClientGlobAsync): json = True async = True def __init__(self): super(TestClientGlobJSONAsync, self).__init__() # why the weird T_est name? because nosetests doesn't seem to reliably # respect the __test__ attribute, and these modules aren't meant to be # invoked as test classes themselves, only as bases for other tests class T_estTest(object): __test__ = False def _echo_test(self, data): result = self.call({'clients':'*', 'method': 'echo', 'module': 'test', 'parameters': [data]}) self.assert_on_fault(result) assert result[self.th] == data def test_add(self): result = self.call({'clients':'*', 'method': 'add', 'module': 'test', 'parameters': [1,2]}) assert result[self.th] == 3 def test_echo_int(self): self._echo_test(37) def test_echo_array(self): self._echo_test([1,2,"three", "fore", "V"]) def test_echo_hash(self): self._echo_test({'one':1, 'two':2, 'three': 3, 'four':"IV"}) def test_echo_float(self): self._echo_test(1.0) # NOTE/FIXME: the big float tests fail for yaml and json def test_echo_big_float(self): self._echo_test(123121232.23) def test_echo_bigger_float(self): self._echo_test(234234234234234234234.234234234234234) def test_echo_little_float(self): self._echo_test(0.0000000000000000000000000000000000037) # Note/FIXME: these test currently fail for YAML def test_echo_boolean_true(self): self._echo_test(True) def test_echo_boolean_false(self): self._echo_test(False) class T_estTestAsync(T_estTest): __test__ = False async = True class TestTestYaml(YamlBaseTest, T_estTest): yaml = True def __init__(self): super(YamlBaseTest, self).__init__() class TestTestJSON(JSONBaseTest, T_estTest): json = True def __init__(self): super(JSONBaseTest,self).__init__() class TestTestAsyncJSON(JSONBaseTest, T_estTestAsync): json = True async = True def __init__(self): super(JSONBaseTest,self).__init__() class TestTestAsyncYaml(YamlBaseTest, T_estTestAsync): yaml = True async = True def __init__(self): super(YamlBaseTest,self).__init__()<|fim▁end|>
<|file_name|>mp4.rs<|end_file_name|><|fim▁begin|>#![no_main]
use libfuzzer_sys::fuzz_target;
use mp4parse_capi::*;
use std::convert::TryInto;
use std::io::Read;

type CursorType<'a> = std::io::Cursor<&'a [u8]>;

extern "C" fn vec_read(buf: *mut u8, size: usize, userdata: *mut std::os::raw::c_void) -> isize {
    let input: &mut CursorType = unsafe { &mut *(userdata as *mut _) };
    let buf = unsafe { std::slice::from_raw_parts_mut(buf, size) };
    match input.read(buf) {
        Ok(n) => n.try_into().expect("invalid conversion"),
        Err(_) => -1,
    }
}

fuzz_target!(|data: &[u8]| {
    let mut cursor: CursorType = std::io::Cursor::new(data);<|fim▁hole|>
        read: Some(vec_read),
        userdata: &mut cursor as *mut _ as *mut std::os::raw::c_void,
    };
    unsafe {
        let mut context = std::ptr::null_mut();
        if mp4parse_new(&io, &mut context) != Mp4parseStatus::Ok {
            return;
        }
        let mut frag_info = Default::default();
        mp4parse_get_fragment_info(context, &mut frag_info);
        let mut pssh_info = Default::default();
        mp4parse_get_pssh_info(context, &mut pssh_info);
        let mut count = 0;
        mp4parse_get_track_count(context, &mut count);
        for track in 0..count {
            let mut fragmented = 0;
            mp4parse_is_fragmented(context, track, &mut fragmented);
            let mut info = Default::default();
            mp4parse_get_track_info(context, track, &mut info);
            match info.track_type {
                Mp4parseTrackType::Video => {
                    let mut video = Mp4parseTrackVideoInfo::default();
                    mp4parse_get_track_video_info(context, track, &mut video);
                }
                Mp4parseTrackType::Audio => {
                    let mut audio = Default::default();
                    mp4parse_get_track_audio_info(context, track, &mut audio);
                }
                // No C API for metadata tracks yet.
                Mp4parseTrackType::Metadata => {}
            }
            let mut indices = Default::default();
            mp4parse_get_indice_table(context, track, &mut indices);
        }
        mp4parse_free(context);
    }
});<|fim▁end|>
let io = Mp4parseIo {
<|file_name|>callback_utils.py<|end_file_name|><|fim▁begin|>__all__ = [
    'fixed_value',
    'coalesce',
]

try:
    from itertools import ifilter as filter
except ImportError:
    pass


class _FixedValue(object):
    def __init__(self, value):
        self._value = value

    def __call__(self, *args, **kwargs):
        return self._value


def fixed_value(value):
    return _FixedValue(value)


class _Coalesce(object):
    def _filter(self, x):
        return x is not None

    def __init__(self, callbacks, else_=None):
        self._callbacks = callbacks
        self._else = else_

    def __call__(self, invoice):
        results = (
            callback(invoice)
            for callback in self._callbacks
        )
        try:
            return next(filter(
                self._filter,
                results
            ))<|fim▁hole|>
        except StopIteration:
            return self._else


def coalesce(callbacks, else_=None):
    return _Coalesce(callbacks, else_=else_)<|fim▁end|>
<|file_name|>test_layers.py<|end_file_name|><|fim▁begin|>import unittest import numpy as np import theano import theano.tensor as T from tests.helpers import (SimpleTrainer, SimpleClf, SimpleTransformer, simple_reg) from theano_wrapper.layers import (BaseLayer, HiddenLayer, MultiLayerBase, BaseEstimator, BaseTransformer, LinearRegression, LogisticRegression, MultiLayerPerceptron, MultiLayerRegression, TiedAutoEncoder, AutoEncoder) # BASE LAYERS ################################################################ class TestBaseLayer(unittest.TestCase): """ Tests for layer.py module, which includes various types of layers for theano-wrapper """ def test_base_layer_has_params(self): base = BaseLayer(100, 10) self.assertTrue(hasattr(base, 'params'), msg="Class has no attribute 'parameters'") def test_base_layer_params_not_empty(self): base = BaseLayer(100, 10) self.assertTrue(base.params, msg="Class 'parameters' are empty") def test_base_layer_no_args(self): # Test if BaseLayer initializes as expected when given no # extra arguements try: BaseLayer(100, 10) except Exception as e: self.fail("Class initialization failed: %s" % str(e)) def test_base_layer_params_are_theano_shared_variables(self): base = BaseLayer(100, 10) for p in base.params: self.assertIsInstance(p, theano.compile.SharedVariable) def test_base_layer_has_input(self): base = BaseLayer(100, 10) self.assertTrue(hasattr(base, 'X')) def test_base_layer_input_is_theano_variable(self): base = BaseLayer(100, 10) self.assertIsInstance(base.X, theano.tensor.TensorVariable) def test_base_layer_weights_shape(self): base = BaseLayer(100, 10) self.assertEqual(base.W.get_value().shape, (100, 10)) def test_base_layer_bias_shape(self): base = BaseLayer(100, 10) self.assertEqual(base.b.get_value().shape, (10,)) def test_base_layer_weights_shape_single_output(self): base = BaseLayer(100, 1) self.assertEqual(base.W.get_value().shape, (100,)) def test_base_layer_bias_shape_single_output(self): base = BaseLayer(100, 1) self.assertEqual(base.b.get_value().shape, ()) def test_base_layer_no_output(self): base = BaseLayer(100, 10) self.assertFalse(hasattr(base, 'y')) def test_base_layer_int_output(self): base = BaseLayer(100, 10, y='int') self.assertTrue(hasattr(base, 'y')) self.assertTrue(hasattr(base.y, 'dtype')) self.assertEqual(base.y.dtype, 'int32') def test_base_layer_float_output(self): base = BaseLayer(100, 10, y='float') self.assertTrue(hasattr(base, 'y')) self.assertTrue(hasattr(base.y, 'dtype')) self.assertEqual(base.y.dtype, 'float32') def test_base_layer_custom_weights(self): try: BaseLayer(100, 10, weights=np.random.random_sample((100, 10))) except TypeError: self.fail("Class did not accept 'weights' arg") class TestHiddenLayer(unittest.TestCase): """ Tests for HiddenLayer class. This class is used only by other classes, so mostly basic stuff here. 
""" def test_hidden_layer_has_params(self): base = HiddenLayer(100, 10) self.assertTrue(hasattr(base, 'params'), msg="Class has no attribute 'parameters'") def test_hidden_layer_params_not_empty(self): base = HiddenLayer(100, 10) self.assertTrue(base.params, msg="Class 'parameters' are empty") def test_hidden_layer_no_args(self): # Test if HiddenLayer initializes as expected when given no # extra arguements try: HiddenLayer(100, 10)<|fim▁hole|> self.fail("Class initialization failed: %s" % str(e)) def test_hidden_layer_params_are_theano_shared_variables(self): base = HiddenLayer(100, 10) for p in base.params: self.assertIsInstance(p, theano.compile.SharedVariable) def test_hidden_layer_has_input(self): base = HiddenLayer(100, 10) self.assertTrue(hasattr(base, 'X')) def test_hidden_layer_input_is_theano_variable(self): base = HiddenLayer(100, 10) self.assertIsInstance(base.X, theano.tensor.TensorVariable) def test_hidden_layer_weights_shape(self): base = HiddenLayer(100, 10) self.assertEqual(base.W.get_value().shape, (100, 10)) def test_hidden_layer_bias_shape(self): base = HiddenLayer(100, 10) self.assertEqual(base.b.get_value().shape, (10,)) def test_hidden_layer_weights_shape_single_output(self): base = HiddenLayer(100, 1) self.assertEqual(base.W.get_value().shape, (100,)) def test_hidden_layer_bias_shape_single_output(self): base = HiddenLayer(100, 1) self.assertEqual(base.b.get_value().shape, ()) def test_hidden_layer_no_output(self): base = HiddenLayer(100, 10) self.assertFalse(hasattr(base, 'y')) def test_hidden_layer_int_output(self): base = HiddenLayer(100, 10, y='int') self.assertTrue(hasattr(base, 'y')) self.assertTrue(hasattr(base.y, 'dtype')) self.assertEqual(base.y.dtype, 'int32') def test_hidden_layer_float_output(self): base = HiddenLayer(100, 10, y='float') self.assertTrue(hasattr(base, 'y')) self.assertTrue(hasattr(base.y, 'dtype')) self.assertEqual(base.y.dtype, 'float32') class TestMultiLayerBase(unittest.TestCase): """ Tests for MultiLayerBase class """ def test_multi_layer_base_has_params(self): base = MultiLayerBase(100, 50, 10, SimpleClf) self.assertTrue(hasattr(base, 'params'), msg="Class has no attribute 'parameters'") def test_multi_layer_base_params_not_empty(self): base = MultiLayerBase(100, 50, 10, SimpleClf) self.assertTrue(base.params, msg="Class 'parameters' are empty") def test_multi_layer_base_no_args(self): # Test if MultiLayerBase initializes as expected when given no # extra arguements try: MultiLayerBase(100, 50, 10, SimpleClf) except Exception as e: self.fail("Class initialization failed: %s" % str(e)) def test_multi_layer_base_single_layer(self): # Test if MultiLayerBase initializes as expected when given no # extra arguements try: MultiLayerBase(100, 50, 10, SimpleClf) except Exception as e: self.fail("Class initialization failed: %s" % str(e)) def test_multi_layer_base_multi_layer_single_activation(self): # Test if MultiLayerBase initializes as expected when given no # extra arguements try: MultiLayerBase(100, [100, 30, 50], 10, SimpleClf, lambda x: x) except Exception as e: self.fail("Class initialization failed: %s" % str(e)) def test_multi_layer_base_multi_layer_multi_activation(self): # Test if MultiLayerBase initializes as expected when given no # extra arguements try: MultiLayerBase(100, [100, 30, 50], 10, SimpleClf, [lambda x: x for i in range(3)]) except Exception as e: self.fail("Class initialization failed: %s" % str(e)) class BaseEstimatorTransformerTests: def test_has_trainers(self): clf = self.Clf() for t in ['epoch', 'sgd']: 
self.assertIn(t, clf.trainer_aliases) def test_builtin_sgd_trainer(self): clf = self.Clf() try: clf.fit(*self.fit_args, 'sgd', max_iter=1) except Exception as e: self.fail("Fitting failed: %s" % str(e)) def test_builtin_sgd_trainer_all_args(self): clf = self.Clf() try: clf.fit(*self.fit_args, 'sgd', alpha=0.1, batch_size=20, max_iter=1, patience=100, p_inc=3, imp_thresh=0.9, random=10, verbose=1000) except Exception as e: self.fail("Fitting failed: %s" % str(e)) def test_builtin_trainer_regularizer(self): clf = self.Clf() reg = simple_reg(clf) try: clf.fit(*self.fit_args, reg=reg, max_iter=2) except Exception as e: self.fail("Fitting failed: %s" % str(e)) class TestBaseEstimator(unittest.TestCase, BaseEstimatorTransformerTests): TheBase = BaseEstimator TheClf = SimpleClf X = np.random.standard_normal((500, 100)).astype(np.float32) y = np.random.randint(0, 9, (500,)).astype(np.int32) fit_args = (X, y,) def setUp(self): class Clf(self.TheClf, self.TheBase): def __init__(*args, **kwargs): SimpleClf.__init__(*args, **kwargs) self.Clf = Clf class TestBaseTransformer(unittest.TestCase, BaseEstimatorTransformerTests): TheBase = BaseTransformer TheClf = SimpleTransformer X = np.random.standard_normal((500, 100)).astype(np.float32) fit_args = (X,) def setUp(self): class Clf(self.TheClf, self.TheBase): def __init__(*args, **kwargs): self.TheClf.__init__(*args, **kwargs) self.Clf = Clf # ESTIMATORS ################################################################# class EstimatorTests: X = np.random.standard_normal((500, 100)).astype(np.float32) def test_estimator_has_params(self): clf = self.estimator(*self.args) self.assertTrue(hasattr(clf, 'params')) self.assertIsNotNone(clf.params) def test_estimator_has_output(self): clf = self.estimator(*self.args) self.assertIsInstance(clf.output, theano.tensor.TensorVariable) def test_estimator_has_cost(self): clf = self.estimator(*self.args) self.assertIsInstance(clf.cost, theano.tensor.TensorVariable) def test_estimator_fit(self): trn = SimpleTrainer(self.estimator(*self.args)) try: trn.fit(self.X, self.y) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_estimator_with_regularization(self): clf = self.estimator(*self.args) reg = simple_reg(clf) try: trn = SimpleTrainer(clf, reg) trn.fit(self.X, self.y) except Exception as e: self.fail("Estimator failed: %s" % str(e)) def test_estimator_builtin_fit(self): clf = self.estimator(*self.args) try: clf.fit(self.X, self.y, max_iter=1) except Exception as e: self.fail("Estimator failed: %s" % str(e)) def test_estimator_builtin_predict(self): clf = self.estimator(*self.args) clf.fit(self.X, self.y, max_iter=1) pred = clf.predict(self.X) self.assertEqual(pred.shape, (self.X.shape[0],)) class MultiLayerEstimatorMixin: def test_estimator_fit_three_hidden_single_activation(self): args = list(self.args) # set n_hidden arg to an array of n_nodes for three layers args[1] = [args[0], int(args[0]/2), int(args[0]/3)] trn = SimpleTrainer(self.estimator(*args)) try: trn.fit(self.X, self.y) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_estimator_random_arguement_int_seed(self): # The estimator should accept a random arguement for initialization # of weights. Here we test an integer seed. trn = SimpleTrainer(self.estimator(*self.args, random=42)) try: trn.fit(self.X, self.y) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_estimator_random_arguement_rng(self): # The estimator should accept a random arguement for initialization # of weights. 
Here we test a random state generator trn = SimpleTrainer(self.estimator(*self.args, random=np.random.RandomState(42))) try: trn.fit(self.X, self.y) except Exception as e: self.fail("Training failed: %s" % str(e)) class ClassificationTest(EstimatorTests): y = np.random.randint(0, 9, (500,)).astype(np.int32) class RegressionTest(EstimatorTests): y = np.random.random((500,)).astype(np.float32) def test_estimator_fit_multivariate(self): args = list(self.args) args[-1] = 5 y = np.random.random((500, 5)).astype(np.float32) trn = SimpleTrainer(self.estimator(*args)) try: trn.fit(self.X, y) except Exception as e: self.fail("Training failed: %s" % str(e)) class TestLinearRegression(unittest.TestCase, RegressionTest): estimator = LinearRegression args = (100, 1) class TestLogisticRegression(unittest.TestCase, ClassificationTest): estimator = LogisticRegression args = (100, 10) class TestMultiLayerPerceptron(unittest.TestCase, ClassificationTest, MultiLayerEstimatorMixin): estimator = MultiLayerPerceptron args = (100, 100, 10) class TestMultiLayerRegression(unittest.TestCase, RegressionTest, MultiLayerEstimatorMixin): estimator = MultiLayerRegression args = (100, 100, 1) # TRANSFORMERS ############################################################### class TransformerTests: X = np.random.standard_normal((500, 100)).astype(np.float32) def test_transformer_has_params(self): clf = self.transformer(*self.args) self.assertTrue(hasattr(clf, 'params')) self.assertIsNotNone(clf.params) def test_transformer_has_encode(self): clf = self.transformer(*self.args) self.assertIsInstance(clf.encode, theano.tensor.TensorVariable) def test_transformer_has_cost(self): clf = self.transformer(*self.args) self.assertIsInstance(clf.cost, theano.tensor.TensorVariable) def test_transformer_fit(self): trn = SimpleTrainer(self.transformer(*self.args)) try: trn.fit(self.X) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_transformer_with_regularization(self): clf = self.transformer(*self.args) reg = simple_reg(clf) try: trn = SimpleTrainer(clf, reg) trn.fit(self.X) except Exception as e: self.fail("Estimator failed: %s" % str(e)) def test_transfomer_float_n_hidden(self): args = list(self.args) args[-1] = 0.5 trn = SimpleTrainer(self.transformer(*args)) try: trn.fit(self.X) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_transformer_builtin_fit(self): clf = self.transformer(*self.args) try: clf.fit(self.X, max_iter=1) except Exception as e: self.fail("Estimator failed: %s" % str(e)) def test_transformer_builtin_predict(self): clf = self.transformer(*self.args) clf.fit(self.X, max_iter=1) pred = clf.predict(self.X) self.assertEqual(pred.shape, (self.X.shape)) def test_transformer_builtin_transform(self): clf = self.transformer(*self.args) clf.fit(self.X, max_iter=1) pred = clf.transform(self.X) self.assertEqual(pred.shape, (self.X.shape[0], self.args[-1])) class MultiLayerTransformerMixin: def test_transformer_fit_three_hidden_single_activation(self): args = list(self.args) # set n_hidden arg to an array of n_nodes for three layers args[1] = [args[0], int(args[0]/2), int(args[0]/3)] trn = SimpleTrainer(self.transformer(*args)) try: trn.fit(self.X) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_transformer_fit_three_hidden_all_activations(self): args = list(self.args) # set n_hidden arg to an array of n_nodes for three layers args[1] = [args[0], int(args[0]/2), int(args[0]/3)] activation = [T.nnet.sigmoid, T.nnet.softplus, T.nnet.softmax, T.nnet.sigmoid] 
trn = SimpleTrainer(self.transformer(*args, activation)) try: trn.fit(self.X) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_transformer_random_arguement_int_seed(self): # The transformer should accept a random arguement for initialization # of weights. Here we test an integer seed. trn = SimpleTrainer(self.transformer(*self.args, random=42)) try: trn.fit(self.X) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_transformer_random_arguement_rng(self): # The transformer should accept a random arguement for initialization # of weights. Here we test a random state generator trn = SimpleTrainer(self.transformer(*self.args, random=np.random.RandomState(42))) try: trn.fit(self.X) except Exception as e: self.fail("Training failed: %s" % str(e)) class TestTiedAutoEncoder(unittest.TestCase, TransformerTests): transformer = TiedAutoEncoder args = (100, 50) class TestAutoEncoder(unittest.TestCase, TransformerTests, MultiLayerTransformerMixin): transformer = AutoEncoder args = (100, 50) def test_cost_cross_entropy(self): try: trn = SimpleTrainer(self.transformer(*self.args, cost='cross_entropy')) trn.fit(self.X) except Exception as e: self.fail("Training failed: %s" % str(e)) def test_denoising_mode(self): try: trn = SimpleTrainer(self.transformer(*self.args, corrupt=0.1)) trn.fit(self.X) except Exception as e: self.fail("Training failed: %s" % str(e))<|fim▁end|>
except Exception as e:
<|file_name|>GCodeListDecorator.py<|end_file_name|><|fim▁begin|>from UM.Scene.SceneNodeDecorator import SceneNodeDecorator


class GCodeListDecorator(SceneNodeDecorator):
    def __init__(self):
        super().__init__()
        self._gcode_list = []

    def getGCodeList(self):<|fim▁hole|>
        return self._gcode_list

    def setGCodeList(self, list):
        self._gcode_list = list<|fim▁end|>
<|file_name|>watch_zeromq.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright (c) 2017 DevicePilot Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import time
from datetime import datetime

import zeromq_rx<|fim▁hole|>
def printIt(params):
    print(datetime.now(),str(params))

if __name__ == "__main__":
    zeromq_rx.init(printIt)
    print("Watching...")
    while True:
        time.sleep(1)<|fim▁end|>
<|file_name|>PathProperty.ts<|end_file_name|><|fim▁begin|>import { Path, PathUtil } from 'app/model/paths';

import { Property } from './Property';

export class PathProperty extends Property<Path> {
  // @Override
  setEditableValue(model: any, propertyName: string, value: string) {
    let path: Path;
    try {
      path = new Path(value);
    } catch (e) {
      // An error will be thrown if the user attempts to enter an invalid path,
      // which will occur frequently if they type the path out by hand.
      return;
    }
    model[propertyName] = path;
  }

  // @Override
  getEditableValue(model: any, propertyName: string) {
    return model[propertyName] ? model[propertyName].getPathString() : '';
  }

  // @Override
  protected getter(model: any, propertyName: string): Path {
    return model[`${propertyName}_`];
  }

  // @Override
  protected setter(model: any, propertyName: string, value: Path | string) {
    if (!value) {
      model[`${propertyName}_`] = undefined;
      return;
    }
    model[`${propertyName}_`] = typeof value === 'string' ? new Path(value) : value;
  }

  // @Override
  displayValueForValue(value: Path) {
    return value ? value.getPathString() : '';
  }

  // @Override
  interpolateValue(start: Path, end: Path, fraction: number) {
    if (!start || !end || !start.isMorphableWith(end) || !fraction) {
      return start;
    }
    if (fraction === 1) {<|fim▁hole|>
  }

  // @Override
  cloneValue(value: Path) {
    return value ? value.mutate().build() : undefined;
  }

  // @Override
  getAnimatorValueType() {
    return 'pathType';
  }

  // @Override
  getTypeName() {
    return 'PathProperty';
  }
}<|fim▁end|>
      return end;
    }
    return PathUtil.interpolate(start, end, fraction);
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). use protoc_grpcio; use std::path::{Path, PathBuf}; use build_utils::BuildRoot; use std::collections::HashSet; fn main() { let build_root = BuildRoot::find().unwrap(); let thirdpartyprotobuf = build_root.join("3rdparty/protobuf"); mark_dir_as_rerun_trigger(&thirdpartyprotobuf); let grpcio_output_dir = PathBuf::from("src/gen"); replace_if_changed(&grpcio_output_dir, |path| { generate_for_grpcio(&thirdpartyprotobuf, path); format(path); }); let tower_output_dir = PathBuf::from("src/gen_for_tower"); replace_if_changed(&tower_output_dir, |path| { generate_for_tower(&thirdpartyprotobuf, path); format(path); }); // Re-gen if, say, someone does a git clean on the gen dir but not the target dir. This ensures // generated sources are available for reading by programmers and tools like rustfmt alike. mark_dir_as_rerun_trigger(&grpcio_output_dir); mark_dir_as_rerun_trigger(&tower_output_dir); } fn generate_for_grpcio(thirdpartyprotobuf: &Path, gen_dir: &Path) { let amended_proto_root = add_rustproto_header(&thirdpartyprotobuf).expect("Error adding proto bytes header"); protoc_grpcio::compile_grpc_protos( &[ "build/bazel/remote/execution/v2/remote_execution.proto", "google/bytestream/bytestream.proto", "google/rpc/code.proto", "google/rpc/error_details.proto", "google/rpc/status.proto", "google/longrunning/operations.proto", "google/protobuf/empty.proto", ], &[ amended_proto_root.path().to_owned(), thirdpartyprotobuf.join("standard"), thirdpartyprotobuf.join("rust-protobuf"), ], &gen_dir, ) .expect("Failed to compile protos!"); disable_clippy_in_generated_code(&gen_dir).expect("Failed to strip clippy from generated code"); generate_mod_rs(&gen_dir).expect("Failed to generate mod.rs"); } fn mark_dir_as_rerun_trigger(dir: &Path) { for file in walkdir::WalkDir::new(dir) { println!("cargo:rerun-if-changed={}", file.unwrap().path().display()); } } const EXTRA_HEADER: &'static str = r#"import "rustproto.proto"; option (rustproto.carllerche_bytes_for_bytes_all) = true; "#; /// /// Copies protos from thirdpartyprotobuf, adds a header to make protoc_grpcio uses Bytes instead /// of Vec<u8>s, and rewrites them into a temporary directory /// fn add_rustproto_header(thirdpartyprotobuf: &Path) -> Result<tempfile::TempDir, String> { let amended_proto_root = tempfile::TempDir::new().unwrap(); for f in &["bazelbuild_remote-apis", "googleapis"] { let src_root = thirdpartyprotobuf.join(f); for entry in walkdir::WalkDir::new(&src_root) .into_iter() .filter_map(|entry| entry.ok()) .filter(|entry| entry.file_type().is_file()) .filter(|entry| entry.file_name().to_string_lossy().ends_with(".proto")) { let dst = amended_proto_root .path() .join(entry.path().strip_prefix(&src_root).unwrap()); std::fs::create_dir_all(dst.parent().unwrap()) .map_err(|err| format!("Error making dir in temp proto root: {}", err))?; let original = std::fs::read_to_string(entry.path()) .map_err(|err| format!("Error reading proto {}: {}", entry.path().display(), err))?; let mut copy = String::with_capacity(original.len() + EXTRA_HEADER.len()); for line in original.lines() { copy += line; copy += "\n"; if line.starts_with("package ") { copy += EXTRA_HEADER } } std::fs::write(&dst, copy.as_bytes()) .map_err(|err| format!("Error writing {}: {}", dst.display(), err))?; } } Ok(amended_proto_root) } /// /// protoc_grpcio generates its own clippy config, but it's for an out of 
date version of clippy, /// so strip that out so we don't get warnings about it. /// /// Add our own #![allow(clippy::all)] heading to each generated file so that we don't get any /// warnings/errors from generated code not meeting our standards. /// fn disable_clippy_in_generated_code(dir: &Path) -> Result<(), String> { for file in walkdir::WalkDir::new(&dir) .into_iter() .filter_map(|entry| entry.ok()) .filter(|entry| { entry.file_type().is_file() && entry.file_name().to_string_lossy().ends_with(".rs") }) { let lines: Vec<_> = std::fs::read_to_string(file.path()) .map_err(|err| { format!( "Error reading generated protobuf at {}: {}", file.path().display(), err ) })? .lines() .filter(|line| !line.contains("clippy")) .map(str::to_owned) .collect(); let content = String::from("#![allow(clippy::all)]\n") + &lines.join("\n"); std::fs::write(file.path(), content).map_err(|err| { format!( "Error re-writing generated protobuf at {}: {}", file.path().display(), err ) })?; } Ok(()) } fn generate_mod_rs(dir: &Path) -> Result<(), String> { let listing = dir.read_dir().unwrap(); let mut pub_mod_stmts = listing .filter_map(|d| d.ok()) .map(|d| d.file_name().to_string_lossy().into_owned()) .filter(|name| &name != &"mod.rs" && &name != &".gitignore") .map(|name| format!("pub mod {};", name.trim_end_matches(".rs"))) .collect::<Vec<_>>(); pub_mod_stmts.sort(); let contents = format!( "\ // This file is generated. Do not edit. {} ", pub_mod_stmts.join("\n") ); std::fs::write(dir.join("mod.rs"), contents) .map_err(|err| format!("Failed to write mod.rs: {}", err)) } fn generate_for_tower(thirdpartyprotobuf: &Path, out_dir: &Path) { tower_grpc_build::Config::new() .enable_server(true) .enable_client(true) .build( &[PathBuf::from( "build/bazel/remote/execution/v2/remote_execution.proto", )], &std::fs::read_dir(&thirdpartyprotobuf) .unwrap() .into_iter() .map(|d| d.unwrap().path()) .collect::<Vec<_>>(), ) .unwrap_or_else(|e| panic!("protobuf compilation failed: {}", e)); let mut dirs_needing_mod_rs = HashSet::new(); dirs_needing_mod_rs.insert(out_dir.to_owned()); for f in walkdir::WalkDir::new(std::env::var("OUT_DIR").unwrap()) .into_iter() .filter_map(|f| f.ok()) .filter(|f| f.path().extension() == Some("rs".as_ref())) { let mut parts: Vec<_> = f .path() .file_name() .unwrap() .to_str() .unwrap() .split('.') .collect(); // pop .rs parts.pop(); let mut dst = out_dir.to_owned(); for part in parts { dst.push(part); dirs_needing_mod_rs.insert(dst.clone()); if !dst.exists() { std::fs::create_dir_all(&dst).unwrap(); }<|fim▁hole|> std::fs::copy(f.path(), dst).unwrap(); } disable_clippy_in_generated_code(out_dir).expect("Failed to strip clippy from generated code"); for dir in &dirs_needing_mod_rs { generate_mod_rs(dir).expect("Failed to write mod.rs"); } } /// /// Replaces contents of directory `path` with contents of directory populated by passed function. /// Does not modify `path` if the contents are identical. 
/// fn replace_if_changed<F: FnOnce(&Path)>(path: &Path, f: F) { let tempdir = tempfile::TempDir::new().unwrap(); f(tempdir.path()); if !dir_diff::is_different(path, tempdir.path()).unwrap() { return; } if path.exists() { std::fs::remove_dir_all(path).unwrap(); } std::fs::create_dir_all(path.parent().unwrap()).unwrap(); copy_dir::copy_dir(tempdir.path(), path).unwrap(); } fn format(path: &Path) { let mut rustfmt = PathBuf::from(env!("CARGO")); rustfmt.pop(); rustfmt.push("rustfmt"); let mut rustfmt_config = PathBuf::from(env!("CARGO_MANIFEST_DIR")); rustfmt_config.pop(); // bazel_protos rustfmt_config.pop(); // process_execution rustfmt_config.push("rustfmt.toml"); if !rustfmt_config.exists() { panic!("Couldn't find file {}", rustfmt_config.display()); } for file in walkdir::WalkDir::new(path) { let file = file.unwrap(); if file.file_type().is_file() { let success = std::process::Command::new(&rustfmt) .arg(file.path()) .arg("--config-path") .arg(&rustfmt_config) .status() .unwrap() .success(); if !success { panic!("Cargo formatting failed for generated protos. Output should be above."); } } } }<|fim▁end|>
} dirs_needing_mod_rs.remove(&dst); dst = dst.join("mod.rs");
<|file_name|>oncer_test.go<|end_file_name|><|fim▁begin|>package sync

import (
    "testing"
    "time"
    . "github.com/franela/goblin"
)

<|fim▁hole|>    g := Goblin(t)
    g.Describe("#Once", func() {
        g.It("should return nil only once for one id", func() {
            var id = "1"
            g.Assert(Once(id) == nil).IsTrue()
            g.Assert(Once(id) == nil).IsFalse()
            g.Assert(Once(id) == nil).IsFalse()
            g.Assert(Once(id) == nil).IsFalse()
        })
        g.It("should return nil again after ttl reached", func() {
            var id = "2"
            ttl = time.Millisecond * 500
            g.Assert(Once(id) == nil).IsTrue()
            g.Assert(Once(id) == nil).IsFalse()
            g.Assert(Once(id) == nil).IsFalse()
            time.Sleep(time.Second)
            g.Assert(Once(id) == nil).IsTrue()
        })
    })
}<|fim▁end|>
func TestOnce(t *testing.T) {
<|file_name|>IndividualSelection.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# MLC (Machine Learning Control): A genetic algorithm library to solve chaotic problems
# Copyright (C) 2015-2017, Thomas Duriez ([email protected])
# Copyright (C) 2015, Adrian Durán ([email protected])
# Copyright (C) 2015-2017, Ezequiel Torres Feyuk ([email protected])
# Copyright (C) 2016-2017, Marco Germano Zbrun ([email protected])
# Copyright (C) 2016-2017, Raúl Lopez Skuba ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>

from BaseCreation import BaseCreation
from MLC.db.mlc_repository import MLCRepository


class IndividualSelection(BaseCreation):
    """
    Fill a Population with fixed Individuals.
    selected_individuals: dictionary containing {Individual: positions inside the first population}
    fill_creator: creator used to fill empty positions.
    Empty positions inside the Population will be completed using the neighbor individual,
    """

    def __init__(self, selected_individuals, fill_creator):<|fim▁hole|>
        self.__individuals = []

    def create(self, gen_size):
        self.__fill_creator.create(gen_size)
        self.__individuals = self.__fill_creator.individuals()

        # Add Individuals
        for individual, positions in self.__selected_individuals.items():
            for position in positions:
                if position < gen_size:
                    individual_id, _ = MLCRepository.get_instance().add_individual(individual)
                    self.__individuals[position] = (position, individual_id)

    def individuals(self):
        return self.__individuals<|fim▁end|>
        BaseCreation.__init__(self)
        self.__fill_creator = fill_creator
        self.__selected_individuals = selected_individuals
<|file_name|>timer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-

import time


class Timer(object):
    ''' Simple timer control '''

    def __init__(self, delay):
        self.current_time = 0
        self.set_delay(delay)

    def pause(self, pause):
        if pause >= self.delay:
            self.current_time = time.clock()
            self.next_time = self.current_time + pause

    def set_delay(self, delay): <|fim▁hole|>
            self.next_time = self.current_time
        else:
            self.current_time = time.clock()
            self.next_time = self.current_time + self.delay

    def idle(self):
        ''' Verify if the timer is idle '''
        self.current_time = time.clock()

        ## if next frame occurs in the future, now it's idle time
        if self.next_time > self.current_time:
            return True

        # if pass more than one delay time, synchronize it
        if (self.current_time - self.next_time) > self.delay:
            self.next_time = self.current_time + self.delay
        else:
            self.next_time += self.delay

        return False<|fim▁end|>
        assert delay >= 0
        self.delay = delay
        if self.delay == 0:
<|file_name|>KML.py<|end_file_name|><|fim▁begin|>import os import sys import time import numpy import logging from stoqs import models as m from django.conf import settings from django.db.models import Avg from django.http import HttpResponse, HttpResponseBadRequest import pprint logger = logging.getLogger(__name__) class InvalidLimits(Exception): pass def readCLT(fileName): ''' Read the color lookup table from disk and return a python list of rgb tuples. ''' cltList = [] for rgb in open(fileName, 'r'): ##logger.debug("rgb = %s", rgb) (r, g, b) = rgb.strip().split() cltList.append([float(r), float(g), float(b)]) return cltList class KML(object): ''' Manage the construcion of KML files from stoqs. Several options may be set on initialization and<|fim▁hole|> Possible kwargs and their default values: @withTimeStamps: True @withLineStrings: True ''' self.request = request self.qs_mp = qs_mp self.qparams = qparams self.stoqs_object_name = stoqs_object_name ##logger.debug('request = %s', request) ##logger.debug('kwargs = %s', kwargs) ##logger.debug('qparams = %s', qparams) if 'withTimeStamps' in kwargs: self.withTimeStampsFlag = kwargs['withTimeStamps'] else: self.withTimeStampsFlag = True if 'withLineStrings' in kwargs: self.withLineStringsFlag = kwargs['withLineStrings'] else: self.withLineStringsFlag = True if 'withFullIconURL' in kwargs: self.withFullIconURLFlag = kwargs['withFullIconURL'] else: self.withFullIconURLFlag = True if 'stride' in kwargs: # If passed in as an argument self.stride = kwargs['stride'] else: # Check if in request, otherwise set it to 1 self.stride = int(self.request.GET.get('stride', 1)) def kmlResponse(self): ''' Return a response that is a KML represenation of the existing MeasuredParameter query that is in self.qs_mp. pName is either the parameter__name or parameter__standard_name string. Use @stride to return a subset of data. ''' response = HttpResponse() if self.qs_mp is None: raise Exception('self.qs_mp is None.') # If both selected parameter__name takes priority over parameter__standard_name. If parameter__id supplied that takes overall precedence. 
pName = None if 'parameter__standard_name' in self.qparams: pName = self.qparams['parameter__standard_name'] if 'parameter__name' in self.qparams: pName = self.qparams['parameter__name'] if 'parameter__id' in self.qparams: logger.debug('parameter__id = %s', self.qparams['parameter__id']) pName = m.Parameter.objects.using(self.request.META['dbAlias']).get(id=int(self.qparams['parameter__id'])).name logger.debug('pName = %s', pName) if not pName: raise NoDataForKML('parameter__name, parameter__standard_name, nor parameter__id specified') logger.debug('type(self.qs_mp) = %s', type(self.qs_mp)) logger.debug('self.stride = %d', self.stride) logger.debug('self.stoqs_object_name = %s', self.stoqs_object_name) if self.stoqs_object_name == 'measured_parameter': try: # Expect the query set self.qs_mp to be a collection of value lists data = [(mp['measurement__instantpoint__timevalue'], mp['measurement__geom'].x, mp['measurement__geom'].y, mp['measurement__depth'], mp['parameter__name'], mp['datavalue'], mp['measurement__instantpoint__activity__platform__name']) for mp in self.qs_mp[::self.stride]] except TypeError: # Otherwise expect self.qs_mp to be a collection of model instances data = [(mp.measurement.instantpoint.timevalue, mp.measurement.geom.x, mp.measurement.geom.y, mp.measurement.depth, mp.parameter.name, mp.datavalue, mp.measurement.instantpoint.activity.platform.name) for mp in self.qs_mp[::self.stride]] try: folderName = "%s_%.1f_%.1f" % (pName, float(self.qparams['measurement__depth__gte']), float(self.qparams['measurement__depth__lte'])) except KeyError: folderName = "%s_" % (pName,) elif self.stoqs_object_name == 'sampled_parameter': try: # Expect the query set self.qs_mp to be a collection of value lists data = [(mp['sample__instantpoint__timevalue'], mp['sample__geom'].x, mp['sample__geom'].y, mp['sample__depth'], mp['parameter__name'], mp['datavalue'], mp['sample__instantpoint__activity__platform__name']) for mp in self.qs_mp[::self.stride]] except TypeError: # Otherwise expect self.qs_mp to be a collection of model instances data = [(mp.sample.instantpoint.timevalue, mp.sample.geom.x, mp.sample.geom.y, mp.sample.depth, mp.parameter.name, mp.datavalue, mp.sample.instantpoint.activity.platform.name) for mp in self.qs_mp[::self.stride]] try: folderName = "%s_%.1f_%.1f" % (pName, float(self.qparams['sample__depth__gte']), float(self.qparams['sample__depth__lte'])) except KeyError: folderName = "%s_" % (pName,) dataHash = {} for d in data: try: dataHash[d[6]].append(d) except KeyError: dataHash[d[6]] = [] dataHash[d[6]].append(d) if not dataHash: logger.exception('No data collected for making KML within the constraints provided') return response descr = self.request.get_full_path().replace('&', '&amp;') logger.debug(descr) try: kml = self.makeKML(self.request.META['dbAlias'], dataHash, pName, folderName, descr, self.request.GET.get('cmin', None), self.request.GET.get('cmax', None)) except InvalidLimits as e: logger.exception(e) return response response['Content-Type'] = 'application/vnd.google-earth.kml+xml' response.write(kml) return response def makeKML(self, dbAlias, dataHash, pName, title, desc, cmin=None, cmax=None): ''' Generate the KML for the point in mpList cmin and cmax are the color min and max ''' # # Define the color lookup table and the color limits from 2.5 and 97.5 percentiles for each variable # clt = readCLT(os.path.join(settings.STATICFILES_DIRS[0], 'colormaps', 'jetplus.txt')) climHash = {} for p in m.Parameter.objects.using(dbAlias).all().values_list('name'): pn = 
p[0] qs = m.ActivityParameter.objects.using(dbAlias).filter(parameter__name=pn).aggregate(Avg('p025'), Avg('p975')) climHash[pn] = (qs['p025__avg'], qs['p975__avg'],) ##logger.debug('Color lookup min, max values:\n' + pprint.pformat(climHash)) pointKMLHash = {} lineKMLHash = {} if cmin and cmax: try: clim = (float(cmin), float(cmax),) except ValueError: raise InvalidLimits('Cannot make KML with specified cmin, cmax of %s, %s' % (cmin, cmax)) else: try: clim = climHash[pName] except KeyError as e: logger.warn('Parameter "%s" not in Parameter table in database %s' % (pName, dbAlias)) logger.warn('Setting clim to (-1, 1)') clim = (-1, 1) ##logger.debug('clim = %s', clim) for k in dataHash.keys(): (pointStyleKML, pointKMLHash[k]) = self._buildKMLpoints(k, dataHash[k], clt, clim) if self.withLineStringsFlag: (lineStyleKML, lineKMLHash[k]) = self._buildKMLlines(k, dataHash[k], clt, clim) else: logger.debug('Not drawing LineStrings for platform = %s', k) # # KML header # kml = '''<?xml version="1.0" encoding="UTF-8"?> <kml xmlns="http://www.opengis.net/kml/2.2" xmlns:gx="http://www.google.com/kml/ext/2.2" xmlns:kml="http://www.opengis.net/kml/2.2" xmlns:atom="http://www.w3.org/2005/Atom"> <!-- %s --> <!-- Mike McCann MBARI 28 October 2010 --> <Document> <name>%s</name> <description>%s</description> ''' % ('Automatically generated by STOQS', title, desc) kml += pointStyleKML if self.withLineStringsFlag: kml += lineStyleKML # # See that the platforms are alphabetized in the KML. (The point and line KMLHashes will have the same keys.) # platList = pointKMLHash.keys() platList.sort() for plat in platList: kml += '''<Folder> <name>%s Points</name> %s </Folder>''' % (plat, pointKMLHash[plat]) if self.withLineStringsFlag: kml += '''<Folder> <name>%s Lines</name> %s </Folder>''' % (plat, lineKMLHash[plat]) # # Footer # kml += '''</Document> </kml>''' return kml def _buildKMLlines(self, plat, data, clt, clim): ''' Build KML placemark LineStrings of all the point data in `list` Use distinctive line colors for each platform. the same way as is done in the auvctd dorado science data processing. `data` are the results of a query, say from xySlice() `clt` is a Color Lookup Table equivalent to a jetplus clt as used in Matlab `clim` is a 2 element list equivalent to clim in Matlab Return strings of style and point KML that can be included in a master KML file. 
''' styleKml = ''' <Style id="Tethys"> <LineStyle> <color>ff0055ff</color> <width>2</width> </LineStyle> </Style> <Style id="Gulper_AUV"> <LineStyle> <color>ff00ffff</color> <width>2</width> </LineStyle> </Style> <Style id="John Martin"> <LineStyle> <color>ffffffff</color> <width>1</width> </LineStyle> </Style> ''' # # Build the LineString for the points # lineKml = '' lastCoordStr = '' for row in data: (dt, lon, lat, depth, parm, datavalue, platform) = row if lat < -90 or lat > 90: # HACK warning: Fix any accidentally swapped lat & lons foo = lon lon = lat lat = foo coordStr = "%.6f,%.6f,-%.1f" % (lon, lat, depth) if lastCoordStr: if self.withTimeStampsFlag: placemark = """ <Placemark> <TimeStamp> <when>%s</when> </TimeStamp> <LineString> <altitudeMode>absolute</altitudeMode> <coordinates> %s </coordinates> </LineString> </Placemark> """ % (time.strftime("%Y-%m-%dT%H:%M:%SZ", dt.timetuple()), lastCoordStr + ' ' + coordStr) else: placemark = """ <Placemark> <LineString> <altitudeMode>absolute</altitudeMode> <coordinates> %s </coordinates> </LineString> </Placemark> """ % (lastCoordStr + ' ' + coordStr) lineKml += placemark lastCoordStr = coordStr return (styleKml, lineKml) def _buildKMLpoints(self, plat, data, clt, clim): ''' Build KML Placemarks of all the point data in `list` and use colored styles the same way as is done in the auvctd dorado science data processing. `data` are the results of a query, say from xySlice() `clt` is a Color Lookup Table equivalent to a jetplus clt as used in Matlab `clim` is a 2 element list equivalent to clim in Matlab Return strings of style and point KML that can be included in a master KML file. ''' _debug = False # # Build the styles for the colors in clt using clim # if self.withFullIconURLFlag: try: baseURL = self.request.build_absolute_uri('/')[:-1] + '/' + settings.STATIC_URL except KeyError: baseURL = 'http://odss.mbari.org' + '/' + settings.STATIC_URL else: baseURL = settings.STATIC_URL styleKml = '' for c in clt: ge_color = "ff%02x%02x%02x" % ((round(c[2] * 255), round(c[1] * 255), round(c[0] * 255))) if _debug: logger.debug("c = %s", c) logger.debug("ge_color = %s", ge_color) style = '''<Style id="%s"> <IconStyle> <color>%s</color> <scale>0.3</scale> <Icon> <href>%s.png</href> </Icon> </IconStyle> </Style> ''' % (ge_color, ge_color, os.path.join(baseURL, 'colormaps', 'jetplus_dots', ge_color)) styleKml += style # # Build the placemarks for the points # pointKml = '' for row in data: (dt, lon, lat, depth, parm, datavalue, platform) = row if lat < -90 or lat > 90: # HACK Warning: Fix any accidentally swapped lat & lons foo = lon lon = lat lat = foo coordStr = "%.6f, %.6f,-%.1f" % (lon, lat, depth) if _debug: logger.debug("datavalue = %f", float(datavalue)) logger.debug("clim = %s", clim) try: clt_index = int(round((float(datavalue) - clim[0]) * ((len(clt) - 1) / float(numpy.diff(clim))))) except ZeroDivisionError: raise InvalidLimits('cmin and cmax are the same value') except ValueError as e: # Likely: 'cannot convert float NaN to integer' e.g. 
for altitude outside of terrain coverage continue if clt_index < 0: clt_index = 0; if clt_index > (len(clt) - 1): clt_index = int(len(clt) - 1); if _debug: logger.debug("clt_index = %d", clt_index) ge_color_val = "ff%02x%02x%02x" % ((round(clt[clt_index][2] * 255), round(clt[clt_index][1] * 255), round(clt[clt_index][0] * 255))) if self.withTimeStampsFlag: placemark = """ <Placemark> <styleUrl>#%s</styleUrl> <TimeStamp> <when>%s</when> </TimeStamp> <Point> <altitudeMode>absolute</altitudeMode> <coordinates> %s </coordinates> </Point> </Placemark> """ % (ge_color_val, time.strftime("%Y-%m-%dT%H:%M:%SZ", dt.timetuple()), coordStr) else: placemark = """ <Placemark> <styleUrl>#%s</styleUrl> <Point> <altitudeMode>absolute</altitudeMode> <coordinates> %s </coordinates> </Point> </Placemark> """ % (ge_color_val, coordStr) pointKml += placemark return (styleKml, pointKml) def _buildKMLlabels(self, plat, data, clt, clim): ''' Build KML Placemarks of the last point of the data and give it a label Return strings of style and point KML that can be included in a master KML file. ''' pass<|fim▁end|>
clients can get KML output with the kmlResponse() method. ''' def __init__(self, request, qs_mp, qparams, stoqs_object_name, **kwargs): '''
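The color mapping in _buildKMLpoints above scales each datavalue onto the jetplus color lookup table and clamps the result to the table bounds. Below is a minimal standalone sketch of that arithmetic; the helper name clt_index_for, the 256-entry table size and the example clim values are illustrative assumptions, not part of the STOQS module:

def clt_index_for(datavalue, clim, clt_len=256):
    # (value - cmin) scaled by (table length - 1) / (cmax - cmin); the same arithmetic as the
    # numpy.diff(clim) expression in _buildKMLpoints, written here without numpy
    idx = int(round((float(datavalue) - clim[0]) * ((clt_len - 1) / (clim[1] - clim[0]))))
    # clamp to valid table indices, mirroring the bounds checks in the original
    return max(0, min(idx, clt_len - 1))

print(clt_index_for(0.0, (-1.0, 1.0)))  # 128: a mid-scale value maps near the middle of the table
print(clt_index_for(5.0, (-1.0, 1.0)))  # 255: an out-of-range value is clamped to the last entry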
<|file_name|>jsdoc.js<|end_file_name|><|fim▁begin|>//@flow const {foo, Bar, baz, qux} = require('./jsdoc-exports');<|fim▁hole|> NumberEnum, BooleanEnum, SymbolEnum, } = require('./jsdoc-objects'); /** a JSDoc in the same file */ function x() {} ( ); // ^<|fim▁end|>
const { DefaultedStringEnum, InitializedStringEnum,
<|file_name|>FizzBuzz3Compile.rs<|end_file_name|><|fim▁begin|>pub fn evaluate(Config((d1, w1), (d2, w2)): Config, i: int) -> String { match (i % d1 == 0, i % d2 == 0) { (true, false) => w1.to_string(),<|fim▁hole|> (true, true) => w1.to_string().append(w2), (false, false) => i.to_string(), } }<|fim▁end|>
(false, true) => w2.to_string(),
<|file_name|>systemmonitor.py<|end_file_name|><|fim▁begin|>""" Support for monitoring the local system.. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.systemmonitor/ """ import logging import homeassistant.util.dt as dt_util from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.helpers.entity import Entity REQUIREMENTS = ['psutil==4.0.0'] SENSOR_TYPES = { 'disk_use_percent': ['Disk Use', '%', 'mdi:harddisk'], 'disk_use': ['Disk Use', 'GiB', 'mdi:harddisk'], 'disk_free': ['Disk Free', 'GiB', 'mdi:harddisk'], 'memory_use_percent': ['RAM Use', '%', 'mdi:memory'], 'memory_use': ['RAM Use', 'MiB', 'mdi:memory'], 'memory_free': ['RAM Free', 'MiB', 'mdi:memory'], 'processor_use': ['CPU Use', '%', 'mdi:memory'], 'process': ['Process', '', 'mdi:memory'], 'swap_use_percent': ['Swap Use', '%', 'mdi:harddisk'], 'swap_use': ['Swap Use', 'GiB', 'mdi:harddisk'], 'swap_free': ['Swap Free', 'GiB', 'mdi:harddisk'], 'network_out': ['Sent', 'MiB', 'mdi:server-network'], 'network_in': ['Recieved', 'MiB', 'mdi:server-network'], 'packets_out': ['Packets sent', '', 'mdi:server-network'], 'packets_in': ['Packets recieved', '', 'mdi:server-network'], 'ipv4_address': ['IPv4 address', '', 'mdi:server-network'], 'ipv6_address': ['IPv6 address', '', 'mdi:server-network'], 'last_boot': ['Last Boot', '', 'mdi:clock'], 'since_last_boot': ['Since Last Boot', '', 'mdi:clock'] } _LOGGER = logging.getLogger(__name__) # pylint: disable=unused-argument def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the sensors.""" dev = [] for resource in config['resources']: if 'arg' not in resource: resource['arg'] = '' if resource['type'] not in SENSOR_TYPES: _LOGGER.error('Sensor type: "%s" does not exist', resource['type']) else: dev.append(SystemMonitorSensor(resource['type'], resource['arg'])) add_devices(dev) class SystemMonitorSensor(Entity): """Implementation of a system monitor sensor.""" def __init__(self, sensor_type, argument=''): """Initialize the sensor.""" self._name = SENSOR_TYPES[sensor_type][0] + ' ' + argument self.argument = argument self.type = sensor_type<|fim▁hole|> @property def name(self): """Return the name of the sensor.""" return self._name.rstrip() @property def icon(self): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][2] @property def state(self): """Return the state of the device.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement # pylint: disable=too-many-branches def update(self): """Get the latest system information.""" import psutil if self.type == 'disk_use_percent': self._state = psutil.disk_usage(self.argument).percent elif self.type == 'disk_use': self._state = round(psutil.disk_usage(self.argument).used / 1024**3, 1) elif self.type == 'disk_free': self._state = round(psutil.disk_usage(self.argument).free / 1024**3, 1) elif self.type == 'memory_use_percent': self._state = psutil.virtual_memory().percent elif self.type == 'memory_use': self._state = round((psutil.virtual_memory().total - psutil.virtual_memory().available) / 1024**2, 1) elif self.type == 'memory_free': self._state = round(psutil.virtual_memory().available / 1024**2, 1) elif self.type == 'swap_use_percent': self._state = psutil.swap_memory().percent elif self.type == 'swap_use': self._state = round(psutil.swap_memory().used / 1024**3, 1) elif self.type == 'swap_free': self._state = 
round(psutil.swap_memory().free / 1024**3, 1) elif self.type == 'processor_use': self._state = round(psutil.cpu_percent(interval=None)) elif self.type == 'process': if any(self.argument in l.name() for l in psutil.process_iter()): self._state = STATE_ON else: self._state = STATE_OFF elif self.type == 'network_out': self._state = round(psutil.net_io_counters(pernic=True) [self.argument][0] / 1024**2, 1) elif self.type == 'network_in': self._state = round(psutil.net_io_counters(pernic=True) [self.argument][1] / 1024**2, 1) elif self.type == 'packets_out': self._state = psutil.net_io_counters(pernic=True)[self.argument][2] elif self.type == 'packets_in': self._state = psutil.net_io_counters(pernic=True)[self.argument][3] elif self.type == 'ipv4_address': self._state = psutil.net_if_addrs()[self.argument][0][1] elif self.type == 'ipv6_address': self._state = psutil.net_if_addrs()[self.argument][1][1] elif self.type == 'last_boot': self._state = dt_util.datetime_to_date_str( dt_util.as_local( dt_util.utc_from_timestamp(psutil.boot_time()))) elif self.type == 'since_last_boot': self._state = dt_util.utcnow() - dt_util.utc_from_timestamp( psutil.boot_time())<|fim▁end|>
self._state = None self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self.update()
<|file_name|>test_log_depot_operation.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Tests used to check the operation of log collecting. Author: Milan Falešník <[email protected]> Since: 2013-02-20 """ from datetime import datetime import fauxfactory import pytest import re from cfme import test_requirements from cfme.configure import configuration as configure from utils import conf, testgen from utils.appliance.implementations.ui import navigate_to from utils.blockers import BZ from utils.ftp import FTPClient from utils.providers import get_mgmt from utils.version import current_version from utils.virtual_machines import deploy_template pytestmark = [test_requirements.log_depot] class LogDepotType(object): def __init__(self, protocol, credentials, access_dir=None, path=None): self.protocol = protocol self._param_name = self.protocol self.credentials = credentials self.access_dir = access_dir or "" self.path = path self.machine_ip = None @property def ftp(self): if self.protocol == "anon_ftp": ftp_user_name = "anonymous" ftp_password = "" # case anonymous connection cfme works only with hardcoded "incoming" directory # incoming folder used for https://bugzilla.redhat.com/show_bug.cgi?id=1307019 upload_dir = "incoming" else: ftp_user_name = self.credentials["username"] ftp_password = self.credentials["password"] # if it's not anonymous using predefined credentials upload_dir = "/" return FTPClient(self.machine_ip, ftp_user_name, ftp_password, upload_dir) def pytest_generate_tests(metafunc): """ Parametrizes the logdepot tests according to cfme_data YAML file. YAML structure (shared with db backup tests) is as follows: log_db_depot: credentials: credentials_key protocols: smb: path_on_host: /path/on/host use_for_log_collection: True use_for_db_backups: False nfs: hostname: nfs.example.com/path/on/host use_for_log_collection: False use_for_db_backups: True ftp: hostname: ftp.example.com use_for_log_collection: True """ if metafunc.function.__name__ == 'test_collect_unconfigured': return fixtures = ['log_depot'] data = conf.cfme_data.get("log_db_operations", {}) depots = [] ids = [] creds = conf.credentials[data['credentials']] for protocol, proto_data in data['protocols'].iteritems(): if proto_data['use_for_log_collection']: depots.append([LogDepotType( protocol, creds, proto_data.get('sub_folder'), proto_data.get('path_on_host'))]) ids.append(protocol) if metafunc.function.__name__ in ['test_collect_multiple_servers', "test_collect_single_servers"]: ids = ids[:1] depots = depots[:1] testgen.parametrize(metafunc, fixtures, depots, ids=ids, scope="function") return @pytest.yield_fixture(scope="module") def depot_machine_ip(): """ Deploy vm for depot test This fixture uses for deploy vm on provider from yaml and then receive it's ip After test run vm deletes from provider """ depot_machine_name = "test_long_log_depot_{}".format(fauxfactory.gen_alphanumeric()) data = conf.cfme_data.get("log_db_operations", {}) depot_provider_key = data["log_db_depot_template"]["provider"] depot_template_name = data["log_db_depot_template"]["template_name"] prov = get_mgmt(depot_provider_key) deploy_template(depot_provider_key, depot_machine_name, template_name=depot_template_name) yield prov.get_ip_address(depot_machine_name) prov.delete_vm(depot_machine_name) <|fim▁hole|> temp_appliance_unconfig.appliance_console_cli.configure_appliance_external_join(hostname, app_creds_modscope['username'], app_creds_modscope['password'], 'vmdb_production', hostname, app_creds_modscope['sshlogin'], 
app_creds_modscope['sshpass']) temp_appliance_unconfig.start_evm_service() temp_appliance_unconfig.wait_for_evm_service() temp_appliance_unconfig.wait_for_web_ui() return temp_appliance_unconfig @pytest.yield_fixture(scope="function") def configured_depot(log_depot, depot_machine_ip): """ Configure selected depot provider This fixture used the trick that the fixtures are cached for given function. So if placed behind the depot_* stuff on the test function, it can actually take the values from them. It also provides a finalizer to disable the depot after test run. """ log_depot.machine_ip = depot_machine_ip uri = log_depot.machine_ip + log_depot.access_dir log_depot = configure.ServerLogDepot(log_depot.protocol, depot_name=fauxfactory.gen_alphanumeric(), uri=uri, username=log_depot.credentials["username"], password=log_depot.credentials["password"] ) log_depot.create() yield log_depot log_depot.clear() def check_ftp(ftp, server_name, server_zone_id): server_string = server_name + "_" + str(server_zone_id) with ftp: # Files must have been created after start with server string in it (for ex. EVM_1) zip_files = ftp.filesystem.search(re.compile(r"^.*{}.*?[.]zip$".format(server_string)), directories=False) assert zip_files, "No logs found!" # Check the times of the files by names datetimes = [] for file in zip_files: # files looks like "Current_region_0_default_1_EVM_1_20170127_043343_20170127_051010.zip" # 20170127_043343 - date and time date = file.name.split("_") date_from = date[7] + date[8] # removing ".zip" from last item date_to = date[9] + date[10][:-4] try: date_from = datetime.strptime(date_from, "%Y%m%d%H%M%S") date_to = datetime.strptime(date_to, "%Y%m%d%H%M%S") except ValueError: assert False, "Wrong file matching of {}".format(file.name) datetimes.append((date_from, date_to, file.name)) # Check for the gaps if len(datetimes) > 1: for i in range(len(datetimes) - 1): dt = datetimes[i + 1][0] - datetimes[i][1] assert dt.total_seconds() >= 0.0, \ "Negative gap between log files ({}, {})".format( datetimes[i][2], datetimes[i + 1][2]) @pytest.mark.tier(3) @pytest.mark.nondestructive @pytest.mark.meta(blockers=[BZ(1341502, unblock=lambda log_depot: log_depot.protocol != "anon_ftp", forced_streams=["5.6", "5.7", "5.8", "upstream"])] ) def test_collect_log_depot(log_depot, appliance, configured_depot, request): """ Boilerplate test to verify functionality of this concept Will be extended and improved. 
""" # Wipe the FTP contents in the end @request.addfinalizer def _clear_ftp(): with log_depot.ftp as ftp: ftp.cwd(ftp.upload_dir) ftp.recursively_delete() # Prepare empty workspace with log_depot.ftp as ftp: # move to upload folder ftp.cwd(ftp.upload_dir) # delete all files ftp.recursively_delete() # Start the collection configured_depot.collect_all() # Check it on FTP check_ftp(log_depot.ftp, appliance.server_name(), appliance.server_zone_id()) @pytest.mark.meta(blockers=[BZ(1436367, forced_streams=["5.8"])]) @pytest.mark.tier(3) def test_collect_unconfigured(appliance): """ Test checking is collect button enable and disable after log depot was configured """ log_credentials = configure.ServerLogDepot("anon_ftp", depot_name=fauxfactory.gen_alphanumeric(), uri=fauxfactory.gen_alphanumeric()) log_credentials.create() view = navigate_to(appliance.server, 'DiagnosticsCollectLogs') # check button is enable after adding log depot assert view.collect.item_enabled('Collect all logs') is True log_credentials.clear() # check button is disable after removing log depot assert view.collect.item_enabled('Collect all logs') is False @pytest.mark.uncollectif(lambda from_slave: from_slave and BZ.bugzilla.get_bug(1443927).is_opened and current_version() >= '5.8') @pytest.mark.meta(blockers=[BZ(1436367, forced_streams=["5.8"])]) @pytest.mark.parametrize('from_slave', [True, False], ids=['from_slave', 'from_master']) @pytest.mark.parametrize('zone_collect', [True, False], ids=['zone_collect', 'server_collect']) @pytest.mark.parametrize('collect_type', ['all', 'current'], ids=['collect_all', 'collect_current']) @pytest.mark.tier(3) def test_collect_multiple_servers(log_depot, temp_appliance_preconfig, depot_machine_ip, request, configured_external_appliance, zone_collect, collect_type, from_slave): appliance = temp_appliance_preconfig log_depot.machine_ip = depot_machine_ip @request.addfinalizer def _clear_ftp(): with log_depot.ftp as ftp: ftp.cwd(ftp.upload_dir) ftp.recursively_delete() # Prepare empty workspace with log_depot.ftp as ftp: # move to upload folder ftp.cwd(ftp.upload_dir) # delete all files ftp.recursively_delete() with appliance: uri = log_depot.machine_ip + log_depot.access_dir depot = configure.ServerLogDepot(log_depot.protocol, depot_name=fauxfactory.gen_alphanumeric(), uri=uri, username=log_depot.credentials["username"], password=log_depot.credentials["password"], second_server_collect=from_slave, zone_collect=zone_collect ) depot.create() if collect_type == 'all': depot.collect_all() else: depot.collect_current() if from_slave and zone_collect: check_ftp(log_depot.ftp, appliance.slave_server_name(), appliance.slave_server_zone_id()) check_ftp(log_depot.ftp, appliance.server_name(), appliance.server_zone_id()) elif from_slave: check_ftp(log_depot.ftp, appliance.slave_server_name(), appliance.slave_server_zone_id()) else: check_ftp(log_depot.ftp, appliance.server_name(), appliance.server_zone_id()) @pytest.mark.meta(blockers=[BZ(1436367, forced_streams=["5.8"])]) @pytest.mark.parametrize('zone_collect', [True, False], ids=['zone_collect', 'server_collect']) @pytest.mark.parametrize('collect_type', ['all', 'current'], ids=['collect_all', 'collect_current']) @pytest.mark.tier(3) def test_collect_single_servers(log_depot, appliance, depot_machine_ip, request, zone_collect, collect_type): log_depot.machine_ip = depot_machine_ip @request.addfinalizer def _clear_ftp(): with log_depot.ftp as ftp: ftp.cwd(ftp.upload_dir) ftp.recursively_delete() # Prepare empty workspace with log_depot.ftp as ftp: 
# move to upload folder ftp.cwd(ftp.upload_dir) # delete all files ftp.recursively_delete() uri = log_depot.machine_ip + log_depot.access_dir depot = configure.ServerLogDepot(log_depot.protocol, depot_name=fauxfactory.gen_alphanumeric(), uri=uri, username=log_depot.credentials["username"], password=log_depot.credentials["password"], zone_collect=zone_collect ) depot.create() if collect_type == 'all': depot.collect_all() else: depot.collect_current() check_ftp(log_depot.ftp, appliance.server_name(), appliance.server_zone_id())<|fim▁end|>
@pytest.fixture(scope="module") def configured_external_appliance(temp_appliance_preconfig, app_creds_modscope, temp_appliance_unconfig): hostname = temp_appliance_preconfig.address
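For reference, the file-name parsing inside check_ftp above can be traced on the example name quoted in its comment. This is only an illustrative sketch of the same split/strptime steps, not part of the test module:

from datetime import datetime

name = "Current_region_0_default_1_EVM_1_20170127_043343_20170127_051010.zip"
parts = name.split("_")
date_from = datetime.strptime(parts[7] + parts[8], "%Y%m%d%H%M%S")      # 2017-01-27 04:33:43
date_to = datetime.strptime(parts[9] + parts[10][:-4], "%Y%m%d%H%M%S")  # 2017-01-27 05:10:10 (".zip" stripped)
assert date_to > date_from  # each archive's end time follows its start time; check_ftp compares these across consecutive files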
<|file_name|>namespace_linux.go<|end_file_name|><|fim▁begin|>package osl import ( "fmt" "io/ioutil" "net" "os" "os/exec" "path/filepath" "runtime" "strconv" "strings" "sync" "syscall" "time" "github.com/docker/docker/pkg/reexec" "github.com/docker/libnetwork/ns" "github.com/docker/libnetwork/types" "github.com/sirupsen/logrus" "github.com/vishvananda/netlink" "github.com/vishvananda/netns" ) const defaultPrefix = "/var/run/docker" func init() { reexec.Register("set-ipv6", reexecSetIPv6) } var ( once sync.Once garbagePathMap = make(map[string]bool) gpmLock sync.Mutex gpmWg sync.WaitGroup gpmCleanupPeriod = 60 * time.Second gpmChan = make(chan chan struct{}) prefix = defaultPrefix ) // The networkNamespace type is the linux implementation of the Sandbox // interface. It represents a linux network namespace, and moves an interface // into it when called on method AddInterface or sets the gateway etc. type networkNamespace struct { path string iFaces []*nwIface gw net.IP gwv6 net.IP staticRoutes []*types.StaticRoute neighbors []*neigh nextIfIndex map[string]int isDefault bool nlHandle *netlink.Handle loV6Enabled bool sync.Mutex } // SetBasePath sets the base url prefix for the ns path func SetBasePath(path string) { prefix = path } func init() { reexec.Register("netns-create", reexecCreateNamespace) } func basePath() string { return filepath.Join(prefix, "netns") } func createBasePath() { err := os.MkdirAll(basePath(), 0755) if err != nil { panic("Could not create net namespace path directory") } // Start the garbage collection go routine go removeUnusedPaths() } func removeUnusedPaths() { gpmLock.Lock() period := gpmCleanupPeriod gpmLock.Unlock() ticker := time.NewTicker(period) for { var ( gc chan struct{} gcOk bool ) select { case <-ticker.C: case gc, gcOk = <-gpmChan: } gpmLock.Lock() pathList := make([]string, 0, len(garbagePathMap)) for path := range garbagePathMap { pathList = append(pathList, path) } garbagePathMap = make(map[string]bool) gpmWg.Add(1) gpmLock.Unlock() for _, path := range pathList { os.Remove(path) } gpmWg.Done() if gcOk { close(gc) } } } func addToGarbagePaths(path string) { gpmLock.Lock() garbagePathMap[path] = true gpmLock.Unlock() } func removeFromGarbagePaths(path string) { gpmLock.Lock() delete(garbagePathMap, path) gpmLock.Unlock() } // GC triggers garbage collection of namespace path right away // and waits for it. func GC() { gpmLock.Lock() if len(garbagePathMap) == 0 { // No need for GC if map is empty gpmLock.Unlock() return } gpmLock.Unlock() // if content exists in the garbage paths // we can trigger GC to run, providing a // channel to be notified on completion waitGC := make(chan struct{}) gpmChan <- waitGC // wait for GC completion <-waitGC } // GenerateKey generates a sandbox key based on the passed // container id. 
func GenerateKey(containerID string) string { maxLen := 12 // Read sandbox key from host for overlay if strings.HasPrefix(containerID, "-") { var ( index int indexStr string tmpkey string ) dir, err := ioutil.ReadDir(basePath()) if err != nil { return "" } for _, v := range dir { id := v.Name() if strings.HasSuffix(id, containerID[:maxLen-1]) { indexStr = strings.TrimSuffix(id, containerID[:maxLen-1]) tmpindex, err := strconv.Atoi(indexStr) if err != nil { return "" } if tmpindex > index { index = tmpindex tmpkey = id } } } containerID = tmpkey if containerID == "" { return "" } } if len(containerID) < maxLen { maxLen = len(containerID) } return basePath() + "/" + containerID[:maxLen] } // NewSandbox provides a new sandbox instance created in an os specific way // provided a key which uniquely identifies the sandbox func NewSandbox(key string, osCreate, isRestore bool) (Sandbox, error) { if !isRestore { err := createNetworkNamespace(key, osCreate) if err != nil { return nil, err } } else { once.Do(createBasePath) } n := &networkNamespace{path: key, isDefault: !osCreate, nextIfIndex: make(map[string]int)} sboxNs, err := netns.GetFromPath(n.path) if err != nil { return nil, fmt.Errorf("failed get network namespace %q: %v", n.path, err) } defer sboxNs.Close() n.nlHandle, err = netlink.NewHandleAt(sboxNs, syscall.NETLINK_ROUTE) if err != nil { return nil, fmt.Errorf("failed to create a netlink handle: %v", err) } err = n.nlHandle.SetSocketTimeout(ns.NetlinkSocketsTimeout) if err != nil { logrus.Warnf("Failed to set the timeout on the sandbox netlink handle sockets: %v", err) } // As starting point, disable IPv6 on all interfaces if !n.isDefault { err = setIPv6(n.path, "all", false) if err != nil { logrus.Warnf("Failed to disable IPv6 on all interfaces on network namespace %q: %v", n.path, err) } } if err = n.loopbackUp(); err != nil { n.nlHandle.Delete() return nil, err } return n, nil } func (n *networkNamespace) InterfaceOptions() IfaceOptionSetter { return n } func (n *networkNamespace) NeighborOptions() NeighborOptionSetter { return n } func mountNetworkNamespace(basePath string, lnPath string) error { return syscall.Mount(basePath, lnPath, "bind", syscall.MS_BIND, "") } // GetSandboxForExternalKey returns sandbox object for the supplied path func GetSandboxForExternalKey(basePath string, key string) (Sandbox, error) { if err := createNamespaceFile(key); err != nil { return nil, err } if err := mountNetworkNamespace(basePath, key); err != nil { return nil, err } n := &networkNamespace{path: key, nextIfIndex: make(map[string]int)} sboxNs, err := netns.GetFromPath(n.path) if err != nil { return nil, fmt.Errorf("failed get network namespace %q: %v", n.path, err) } defer sboxNs.Close() n.nlHandle, err = netlink.NewHandleAt(sboxNs, syscall.NETLINK_ROUTE) if err != nil { return nil, fmt.Errorf("failed to create a netlink handle: %v", err) } err = n.nlHandle.SetSocketTimeout(ns.NetlinkSocketsTimeout) if err != nil { logrus.Warnf("Failed to set the timeout on the sandbox netlink handle sockets: %v", err) } // As starting point, disable IPv6 on all interfaces err = setIPv6(n.path, "all", false) if err != nil { logrus.Warnf("Failed to disable IPv6 on all interfaces on network namespace %q: %v", n.path, err) } if err = n.loopbackUp(); err != nil { n.nlHandle.Delete() return nil, err } return n, nil } func reexecCreateNamespace() { if len(os.Args) < 2 { logrus.Fatal("no namespace path provided") } if err := mountNetworkNamespace("/proc/self/ns/net", os.Args[1]); err != nil { logrus.Fatal(err) } } func 
createNetworkNamespace(path string, osCreate bool) error { if err := createNamespaceFile(path); err != nil { return err } cmd := &exec.Cmd{ Path: reexec.Self(), Args: append([]string{"netns-create"}, path), Stdout: os.Stdout, Stderr: os.Stderr, } if osCreate { cmd.SysProcAttr = &syscall.SysProcAttr{} cmd.SysProcAttr.Cloneflags = syscall.CLONE_NEWNET } if err := cmd.Run(); err != nil { return fmt.Errorf("namespace creation reexec command failed: %v", err) } return nil } func unmountNamespaceFile(path string) { if _, err := os.Stat(path); err == nil { syscall.Unmount(path, syscall.MNT_DETACH) } } func createNamespaceFile(path string) (err error) { var f *os.File once.Do(createBasePath) // Remove it from garbage collection list if present removeFromGarbagePaths(path) // If the path is there unmount it first unmountNamespaceFile(path) // wait for garbage collection to complete if it is in progress // before trying to create the file. gpmWg.Wait() if f, err = os.Create(path); err == nil { f.Close() } return err } func (n *networkNamespace) loopbackUp() error { iface, err := n.nlHandle.LinkByName("lo") if err != nil { return err } return n.nlHandle.LinkSetUp(iface) } func (n *networkNamespace) AddLoopbackAliasIP(ip *net.IPNet) error { iface, err := n.nlHandle.LinkByName("lo") if err != nil { return err } return n.nlHandle.AddrAdd(iface, &netlink.Addr{IPNet: ip}) } func (n *networkNamespace) RemoveLoopbackAliasIP(ip *net.IPNet) error { iface, err := n.nlHandle.LinkByName("lo") if err != nil { return err } return n.nlHandle.AddrDel(iface, &netlink.Addr{IPNet: ip}) } func (n *networkNamespace) InvokeFunc(f func()) error { return nsInvoke(n.nsPath(), func(nsFD int) error { return nil }, func(callerFD int) error { f() return nil }) } // InitOSContext initializes OS context while configuring network resources func InitOSContext() func() { runtime.LockOSThread() if err := ns.SetNamespace(); err != nil { logrus.Error(err) } return runtime.UnlockOSThread } func nsInvoke(path string, prefunc func(nsFD int) error, postfunc func(callerFD int) error) error { defer InitOSContext()() newNs, err := netns.GetFromPath(path) if err != nil { return fmt.Errorf("failed get network namespace %q: %v", path, err) } defer newNs.Close() // Invoked before the namespace switch happens but after the namespace file // handle is obtained. if err := prefunc(int(newNs)); err != nil { return fmt.Errorf("failed in prefunc: %v", err) } if err = netns.Set(newNs); err != nil { return err } defer ns.SetNamespace() // Invoked after the namespace switch. return postfunc(ns.ParseHandlerInt()) } func (n *networkNamespace) nsPath() string { n.Lock() defer n.Unlock() return n.path } func (n *networkNamespace) Info() Info { return n } func (n *networkNamespace) Key() string { return n.path } func (n *networkNamespace) Destroy() error { if n.nlHandle != nil { n.nlHandle.Delete() } // Assuming no running process is executing in this network namespace, // unmounting is sufficient to destroy it. 
if err := syscall.Unmount(n.path, syscall.MNT_DETACH); err != nil { return err } // Stash it into the garbage collection list addToGarbagePaths(n.path) return nil } // Restore restore the network namespace func (n *networkNamespace) Restore(ifsopt map[string][]IfaceOption, routes []*types.StaticRoute, gw net.IP, gw6 net.IP) error { // restore interfaces for name, opts := range ifsopt { if !strings.Contains(name, "+") { return fmt.Errorf("wrong iface name in restore osl sandbox interface: %s", name) } seps := strings.Split(name, "+") srcName := seps[0] dstPrefix := seps[1] i := &nwIface{srcName: srcName, dstName: dstPrefix, ns: n} i.processInterfaceOptions(opts...) if i.master != "" { i.dstMaster = n.findDst(i.master, true) if i.dstMaster == "" { return fmt.Errorf("could not find an appropriate master %q for %q", i.master, i.srcName) } } if n.isDefault { i.dstName = i.srcName } else { links, err := n.nlHandle.LinkList() if err != nil { return fmt.Errorf("failed to retrieve list of links in network namespace %q during restore", n.path) } // due to the docker network connect/disconnect, so the dstName should // restore from the namespace for _, link := range links { addrs, err := n.nlHandle.AddrList(link, netlink.FAMILY_V4) if err != nil { return err } ifaceName := link.Attrs().Name if strings.HasPrefix(ifaceName, "vxlan") { if i.dstName == "vxlan" { i.dstName = ifaceName break } } // find the interface name by ip if i.address != nil { for _, addr := range addrs { if addr.IPNet.String() == i.address.String() { i.dstName = ifaceName break } continue } if i.dstName == ifaceName { break } } // This is to find the interface name of the pair in overlay sandbox if strings.HasPrefix(ifaceName, "veth") { if i.master != "" && i.dstName == "veth" { i.dstName = ifaceName } }<|fim▁hole|> } var index int indexStr := strings.TrimPrefix(i.dstName, dstPrefix) if indexStr != "" { index, err = strconv.Atoi(indexStr) if err != nil { return err } } index++ n.Lock() if index > n.nextIfIndex[dstPrefix] { n.nextIfIndex[dstPrefix] = index } n.iFaces = append(n.iFaces, i) n.Unlock() } } // restore routes for _, r := range routes { n.Lock() n.staticRoutes = append(n.staticRoutes, r) n.Unlock() } // restore gateway if len(gw) > 0 { n.Lock() n.gw = gw n.Unlock() } if len(gw6) > 0 { n.Lock() n.gwv6 = gw6 n.Unlock() } return nil } // Checks whether IPv6 needs to be enabled/disabled on the loopback interface func (n *networkNamespace) checkLoV6() { var ( enable = false action = "disable" ) n.Lock() for _, iface := range n.iFaces { if iface.AddressIPv6() != nil { enable = true action = "enable" break } } n.Unlock() if n.loV6Enabled == enable { return } if err := setIPv6(n.path, "lo", enable); err != nil { logrus.Warnf("Failed to %s IPv6 on loopback interface on network namespace %q: %v", action, n.path, err) } n.loV6Enabled = enable } func reexecSetIPv6() { runtime.LockOSThread() defer runtime.UnlockOSThread() if len(os.Args) < 3 { logrus.Errorf("invalid number of arguments for %s", os.Args[0]) os.Exit(1) } ns, err := netns.GetFromPath(os.Args[1]) if err != nil { logrus.Errorf("failed get network namespace %q: %v", os.Args[1], err) os.Exit(2) } defer ns.Close() if err = netns.Set(ns); err != nil { logrus.Errorf("setting into container netns %q failed: %v", os.Args[1], err) os.Exit(3) } var ( action = "disable" value = byte('1') path = fmt.Sprintf("/proc/sys/net/ipv6/conf/%s/disable_ipv6", os.Args[2]) ) if os.Args[3] == "true" { action = "enable" value = byte('0') } if err = ioutil.WriteFile(path, []byte{value, '\n'}, 0644); 
err != nil { logrus.Errorf("failed to %s IPv6 forwarding for container's interface %s: %v", action, os.Args[2], err) os.Exit(4) } os.Exit(0) } func setIPv6(path, iface string, enable bool) error { cmd := &exec.Cmd{ Path: reexec.Self(), Args: append([]string{"set-ipv6"}, path, iface, strconv.FormatBool(enable)), Stdout: os.Stdout, Stderr: os.Stderr, } if err := cmd.Run(); err != nil { return fmt.Errorf("reexec to set IPv6 failed: %v", err) } return nil }<|fim▁end|>
<|file_name|>jsxFactoryMissingErrorInsideAClass.js<|end_file_name|><|fim▁begin|>//// [test.tsx] export class C { factory() { return <div></div>; } } //// [test.js] "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.C = void 0; class C { factory() { return factory.createElement("div", null); } } <|fim▁hole|><|fim▁end|>
exports.C = C;
<|file_name|>boyer_moore_memchr.rs<|end_file_name|><|fim▁begin|>use memchr::memchr; use std::cmp::max; use skip_search::*; use super::SearchIn; pub struct BoyerMooreMemchr <'a> { needle: &'a [u8], bad_chars: [usize; 256], good_suffixes: Vec<usize> } <|fim▁hole|> needle: needle, bad_chars: build_bad_chars_table(&needle), good_suffixes: build_good_suffixes_table(&needle) } } } impl <'a> SearchIn<'a, [u8]> for BoyerMooreMemchr<'a> { type Iter = BoyerMooreMemchrIter<'a>; fn find_in(&'a self, haystack: &'a [u8]) -> BoyerMooreMemchrIter<'a> { BoyerMooreMemchrIter { searcher: &self, haystack: haystack, position: 0, overlapping_matches: false, } } fn find_overlapping_in(&'a self, haystack: &'a [u8]) -> BoyerMooreMemchrIter<'a> { BoyerMooreMemchrIter { searcher: &self, haystack: &haystack, position: 0, overlapping_matches: true } } } impl <'a> SkipSearch<u8> for &'a BoyerMooreMemchr <'a> { #[inline] default fn skip_offset(&self, bad_char: u8, needle_position: usize, haystack: &[u8], haystack_position: usize) -> usize { let skip = max(self.bad_chars[bad_char as usize], self.good_suffixes[needle_position]); if skip < self.needle.len() { skip } else { let last_char = self.needle[self.needle.len() - 1]; let search_position = haystack_position + 2 * self.needle.len() - 1; memchr(last_char, &haystack[search_position .. ]).map(|x| x + 1).unwrap_or(haystack.len()) } } #[inline] fn len(&self) -> usize { self.needle.len() } #[inline] fn char_at(&self, index: usize) -> u8 { self.needle[index] } } pub struct BoyerMooreMemchrIter <'a> { searcher: &'a BoyerMooreMemchr<'a>, haystack: &'a [u8], position: usize, overlapping_matches: bool, } impl <'a> Iterator for BoyerMooreMemchrIter<'a> { type Item = usize; fn next(&mut self) -> Option<usize> { find_from_position(&self.searcher, &self.haystack, self.position) .and_then(|position| { if self.overlapping_matches { self.position = position + 1; } else { self.position = position + self.searcher.needle.len(); } Some(position) }) } } #[cfg(test)] pub mod test { use super::*; use super::super::{SearchIn, CountIn}; #[test] pub fn test_simple() { let needle = BoyerMooreMemchr::new(b"ghi"); let haystack = b"abc def ghi jkl"; assert_eq!(Some(8), needle.find_first_in(haystack)); } #[test] pub fn test_bad_char() { let haystack = b"acacacababadabacacad"; assert_eq!(Some(12), BoyerMooreMemchr::new(b"abacac").find_first_in(haystack)); } #[test] pub fn test_bad_char2() { let needle = BoyerMooreMemchr::new(b"abacab"); let haystack = b"acacacababadabacabad"; assert_eq!(Some(12), needle.find_first_in(haystack)); } #[test] pub fn test_search_twice() { let needle = BoyerMooreMemchr::new(b"xyz"); let haystack = b"01xyzxyz901xyz56xyz"; assert_eq!(Some(2), needle.find_first_in(haystack)); assert_eq!(Some(2), needle.find_first_in(haystack)); } #[test] pub fn test_iter() { let needle = BoyerMooreMemchr::new(b"xyz"); let haystack = b"01xyzxyz890xyz45xyz"; assert_eq!(vec![2,5,11,16], needle.find_in(haystack).collect::<Vec<usize>>()); } #[test] pub fn test_overlapping() { let needle = BoyerMooreMemchr::new(b"aaba"); let haystack = b"aabaabaabaabaaba"; assert_eq!(vec![0,3,6,9,12], needle.find_overlapping_in(haystack).collect::<Vec<usize>>()); } #[test] pub fn test_non_overlapping() { let needle = BoyerMooreMemchr::new(b"aaba"); let haystack = b"aabaabaabaabaaba"; assert_eq!(vec![0,6,12], needle.find_in(haystack).collect::<Vec<usize>>()); } #[test] pub fn test_occurs_in() { let needle = BoyerMooreMemchr::new(b"abc"); let haystack = b"xxxxxxabcxxxxabc"; assert_eq!(true, 
needle.occurs_in(haystack)); } #[test] pub fn test_not_occurs_in() { let needle = BoyerMooreMemchr::new(b"abc"); let haystack = b"xxxxxxabacxxxxaba"; assert_eq!(false, needle.occurs_in(haystack)); } #[test] pub fn test_count() { let needle = BoyerMooreMemchr::new(b"sea"); let haystack = b"She sells sea shells on the sea shore."; assert_eq!(2, needle.count_in(haystack)); } }<|fim▁end|>
impl <'a> BoyerMooreMemchr <'a> { pub fn new(needle: &'a [u8]) -> BoyerMooreMemchr { BoyerMooreMemchr {
<|file_name|>load.js<|end_file_name|><|fim▁begin|>var loadState = { preload: function() { /* Load all game assets Place your load bar, some messages. In this case of loading, only text is placed...<|fim▁hole|> //Load your images, spritesheets, bitmaps... game.load.image('kayle', 'assets/img/kayle.png'); game.load.image('tree', 'assets/img/tree.png'); game.load.image('rock', 'assets/img/rock.png'); game.load.image('undefined', 'assets/img/undefined.png'); game.load.image('grass', 'assets/img/grass.png'); game.load.image('player', 'assets/img/player.png'); game.load.image('btn-play','assets/img/btn-play.png'); game.load.image('btn-load','assets/img/btn-play.png'); //Load your sounds, efx, music... //Example: game.load.audio('rockas', 'assets/snd/rockas.wav'); //Load your data, JSON, Querys... //Example: game.load.json('version', 'http://phaser.io/version.json'); }, create: function() { game.stage.setBackgroundColor('#DEDEDE'); game.scale.fullScreenScaleMode = Phaser.ScaleManager.EXACT_FIT; game.state.start('menu'); } };<|fim▁end|>
*/ var loadingLabel = game.add.text(80, 150, 'loading...', {font: '30px Courier', fill: '#fff'});
<|file_name|>lien.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- ##This file is part of pySequence ############################################################################# ############################################################################# ## ## ## pysequence ## ## ## ############################################################################# ############################################################################# ## Copyright (C) 2014 Cédrick FAURY - Jean-Claude FRICOU ## ## pySéquence : aide à la construction ## de Séquences et Progressions pédagogiques ## et à la validation de Projets # pySequence is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # pySequence is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pySequence; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ module lien *********** """ import os, sys, subprocess import wx import re from util_path import toFileEncoding, toSystemEncoding, SYSTEM_ENCODING from widgets import messageErreur, scaleImage, Grammaire, img2str, str2img import images from drag_file import * from util_path import * from file2bmp import * # from dpi_aware import * SSCALE = 1.0 if sys.platform == 'darwin': def openFolder(path): subprocess.check_call(['open', '--', path]) elif sys.platform == 'linux2': def openFolder(path): subprocess.check_call(['xdg-open', '--', path]) elif sys.platform == 'win32': def openFolder(path): # subprocess.Popen(["explorer", path], shell=True) subprocess.call(['explorer', path.encode(sys.getfilesystemencoding())], shell=True) #################################################################################### # # Objet lien vers un fichier, un dossier ou bien un site web # #################################################################################### regex = re.compile( r'^(?:http|ftp)s?://' # http:// or https:// r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain... r'localhost|' #localhost... r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip r'(?::\d+)?' # optional port r'(?:/?|[/?]\S+)$', re.IGNORECASE) class Lien(): def __init__(self, path = "", typ = ""): self.path = path # Impérativement toujours encodé en FILE_ENCODING !! 
self.type = typ # Type de lien ('d' = dossier, 'f' = fichier, 'u' = url) self.ok = False # Etat du lien (False = lien rompu) self.abs = False # Lien absolu (défaut = lien relatif) ###################################################################################### def __repr__(self): return self.type + " : " + toSystemEncoding(self.path) ###################################################################################### def reset(self): self.path = "" self.type = "" self.ok = False self.abs = False ###################################################################################### def setPath(self, path): self.path = path ###################################################################################### def __neq__(self, l): if self.type != l.type: return True elif self.path != l.path: return True return False ###################################################################################### def __eq__(self, lien): return self.path == lien.path ###################################################################################### def DialogCreer(self, pathref): dlg = URLDialog(None, self, pathref) dlg.ShowModal() dlg.Destroy() ###################################################################################### def Afficher(self, pathref, fenSeq = None): """ Lance l'affichage du contenu du lien <pathref> = chemin de l'application pour déterminer le chemin absolu """ t = self.getTexte() print("Afficher", self.type, self.path) path = self.GetAbsPath(pathref) # print " ", path # print " ", path.decode("unicode-escape") # print " ", path.encode(sys.getfilesystemencoding()) if self.type == "f": if os.path.exists(path): try: os.startfile(path) except: messageErreur(None, "Ouverture impossible", "Impossible d'ouvrir le fichier\n\n%s\n" %toSystemEncoding(path)) else: messageErreur(None, "Chemin non trouvé", "Le fichiern'a pas été trouvé\n\n%s" %toSystemEncoding(path)) elif self.type == 'd': if os.path.isdir(path): openFolder(path) # try: # # subprocess.Popen(["explorer", path]) # # except: # messageErreur(None, u"Ouverture impossible", # u"Impossible d'acc�der au dossier\n\n%s\n" %toSystemEncoding(path)) else: messageErreur(None, "Chemin non trouvé", "Le dossiern'a pas été trouvé\n\n%s" %toSystemEncoding(path)) elif self.type == 'u': try: webbrowser.open(self.path) except: messageErreur(None, "Ouverture impossible", "Impossible d'ouvrir l'url\n\n%s\n" %toSystemEncoding(self.path)) elif self.type == 's': if os.path.isfile(path): # self.Show(False) child = fenSeq.commandeNouveau() child.ouvrir(path) ###################################################################################### def isOk(self): self.EvalTypeLien() return self.ok ###################################################################################### def EvalTypeLien(self, pathref = ""): """ Evaluation du de self.lien.path par rapport à pathref et attribue un type """ # print("EvalTypeLien\n ", self.path, "\n ", pathref) abspath = self.GetAbsPath(pathref) if os.path.exists(abspath): if os.path.isfile(abspath): self.type = 'f' elif os.path.isdir(abspath): self.type = 'd' # if not self.abs: # self.path = relpath # else: # self.path = abspath self.ok = True elif re.match(regex, self.path): self.type = 'u' self.ok = True else: self.type = '' self.ok = False return ###################################################################################### def EvalLien(self, path, pathref): """ Teste la validité du chemin <path> (SYSTEM_ENCODING) par rapport au dossier de référence <pathref> (FILE_ENCODING) et change 
self.path (FILE_ENCODING) """ # print("EvalLien", path, pathref, os.path.exists(pathref)) # print " >", chardet.detect(bytes(path)) # print " >", chardet.detect(bytes(pathref)) if path == "" or path.split() == []: self.reset() return self.EvalTypeLien(pathref) ###################################################################################### def GetAbsPath(self, pathdoc, path = None): """ Renvoie le chemin absolu du lien grace au chemin du document <pathdoc> """ # print("GetAbsPath", path, pathref) if path == None: path = self.path # if path == ".": # return pathdoc cwd = os.getcwd() if pathdoc != "": try: os.chdir(pathdoc) except: pass # print os.path.exists(path) # print os.path.exists(os.path.abspath(path)) # print os.path.exists(os.path.abspath(path).decode(util_path.FILE_ENCODING)) # Immonde bricolage !! # if os.path.exists(os.path.abspath(path)) and os.path.exists(os.path.abspath(path)):#.decode(util_path.FILE_ENCODING)): # path = path.decode(util_path.FILE_ENCODING) path = os.path.abspath(path)#.decode(util_path.FILE_ENCODING) # print(" abs >", path) if os.path.exists(path): path = path else: # print(path, "n'existe pas !") try: path = os.path.join(pathdoc, path) except UnicodeDecodeError: pathdoc = toFileEncoding(pathdoc) path = os.path.join(pathdoc, path) os.chdir(cwd) return path ###################################################################################### def GetRelPath(self, pathdoc, path = None): """ Renvoie le chemin relatif du lien grace au chemin du document <pathdoc> """ if path == None: path = self.path if self.type != 'f' and self.type != 'd': return path # path = self.GetEncode(path) if os.path.exists(path): path = path else: try: path = os.path.join(pathdoc, path) except UnicodeDecodeError: pathdoc = toFileEncoding(pathdoc) path = os.path.join(pathdoc, path) return path ############################################################################################### def getTexte(self): if self.type == 'd': t = "dossier(s)$m" elif self.type == 'f': t = "fichier(s)$m" elif self.type == 'u': t = "URL(s)$f" else: t = "" return Grammaire(t) ############################################################################################### def getNomFichier(self): return os.path.splitext(os.path.basename(self.path))[0] ###################################################################################### def getBranche(self, branche): # branche.set("Lien", toSystemEncoding(os.path.normpath(self.path))) branche.set("Lien", toSystemEncoding(self.path)) branche.set("TypeLien", self.type) branche.set("Abs", str(self.abs)) ###################################################################################### def setBranche(self, branche, pathdoc): self.path = toFileEncoding(branche.get("Lien", "")) if self.path == ".": self.path = "" # print("setBranche Lien", self.path) # self.path = os.path.normpath(self.path) self.type = branche.get("TypeLien", "") self.abs = eval(branche.get("Abs", "False")) if self.type == "" and self.path != "": self.EvalTypeLien(pathdoc) # print(" ", self.path) return True #################################################################################### # # Objet lien vers une image # #################################################################################### class LienImage(Lien): def __init__(self, path = ""): Lien.__init__(self, path, "f") self.image = None ###################################################################################### def getBranche(self, branche): Lien.getBranche(self, branche) # print(self.lien.path) # bmp = 
file2bmp(self.path) # if bmp is not None and bmp is not wx.NullBitmap: self.setBitmap() if self.image is not None and self.image is not wx.NullBitmap: branche.text = img2str(self.image.ConvertToImage()) # elif self.image is not None and self.image is not wx.NullBitmap: # branche.text = img2str(self.image.ConvertToImage()) ###################################################################################### def setBranche(self, branche, pathdoc): Lien.setBranche(self, branche, pathdoc) self.setBitmap(str2img(branche.text)) ###################################################################################### def setBitmap(self, bmp = None): if bmp is not None and isinstance(bmp, wx.Bitmap): self.image = bmp elif self.ok: bmp = file2bmp(self.path) if bmp is not None and bmp is not wx.NullBitmap: self.image = bmp ###################################################################################### def getBitmap(self, defaut = None): """ Renvoie l'image au format wx.Bitmap et met à jour l'image si le lien est Ok priorité à l'image désignée par le lien """ # print("getBitmap") # print(" ", self.type, self.ok) self.setBitmap() # print(" ", self.type, self.ok) if self.image is not None and self.image is not wx.NullBitmap: # print(" --", self.image.IsOk()) return self.image elif isinstance(defaut, wx.Bitmap): return defaut else: return wx.NullBitmap ###################################################################################### def getImageFile(self): """ Renvoie le noms du fichier image obtenu et un booléen indiquant s'il s'agit d'un fichier temporaire ATTENTION : les fichiers temporaires doivent être effacés """ if self.ok: nf = file2imgfile(self.path) elif self.image is not None and self.image is not wx.NullBitmap: nf = wximg2file(self.image) else: return None, None return nf ###################################################################################### def setPath(self, path): self.path = path self.setBitmap() ########################################################################################################## # # Dialogue de sélection d'URL # ########################################################################################################## class URLDialog(wx.Dialog): def __init__(self, parent, lien, pathref): wx.Dialog.__init__(self, parent, -1, "Sélection de lien") self.SetExtraStyle(wx.DIALOG_EX_CONTEXTHELP) # self.Create(parent, -1, "S�lection de lien") sizer = wx.BoxSizer(wx.VERTICAL) label = wx.StaticText(self, -1, "Sélectionner un fichier, un dossier ou une URL") label.SetHelpText("Sélectionner un fichier, un dossier ou une URL") sizer.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 5) box = wx.BoxSizer(wx.HORIZONTAL) label = wx.StaticText(self, -1, "Lien :") # label.SetHelpText("This is the help text for the label") box.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 5) url = URLSelectorCombo(self, lien, pathref) # text.SetHelpText("Here's some help text for field #1") box.Add(url, 1, wx.ALIGN_CENTRE|wx.ALL, 5) self.url = url sizer.Add(box, 0, wx.GROW|wx.ALL, 5) line = wx.StaticLine(self, -1, size=(20,-1), style=wx.LI_HORIZONTAL) sizer.Add(line, 0, wx.GROW|wx.RIGHT|wx.TOP, 5) btnsizer = wx.StdDialogButtonSizer() if wx.Platform != "__WXMSW__": btn = wx.ContextHelpButton(self) btnsizer.AddButton(btn) btn = wx.Button(self, wx.ID_OK) btn.SetHelpText("Valider") btn.SetDefault() btnsizer.AddButton(btn) btn = wx.Button(self, wx.ID_CANCEL) btn.SetHelpText("Annuler") btnsizer.AddButton(btn) btnsizer.Realize() sizer.Add(btnsizer, 0,wx.ALL, 5) self.SetSizer(sizer) sizer.Fit(self) 
###################################################################################### def GetURL(self): return self.url.GetPath() ###################################################################################### def OnPathModified(self, lien): return #################################################################################### # # Evenement perso pour détecter une modification du chemin # #################################################################################### myEVT_PATH_MODIFIED = wx.NewEventType() EVT_PATH_MODIFIED = wx.PyEventBinder(myEVT_PATH_MODIFIED, 1) #---------------------------------------------------------------------- class PathEvent(wx.PyCommandEvent): def __init__(self, evtType, idd): wx.PyCommandEvent.__init__(self, evtType, idd) self.lien = None ###################################################################################### def SetPath(self, lien): self.lien = lien ###################################################################################### def GetPath(self): return self.lien #################################################################################### # # Widget pour sélectionner un lien # #################################################################################### class URLSelectorBase(wx.Panel): def __init__(self, parent, lien, pathref, dossier = True, btn_ouvrir = False, ext = ""): """ lien : type Lien pathref : chemin du dossier de référence (pour chemins relatifs) dossier : bool pour spécifier que le lien est un dossier ext : Extension de fichier par défaut """ # print("init URLSelectorBase", lien.path) wx.Panel.__init__(self, parent, -1) self.SetMaxSize((-1,22*SSCALE)) self.ext = ext # Extension de fichier par défaut self.lien = lien # self.texte = None sizer = wx.BoxSizer(wx.VERTICAL) lsizer = self.CreateSelector(dossier, btn_ouvrir) sizer.Add(lsizer, 1, flag = wx.EXPAND) self.SetSizerAndFit(sizer) self.SetPathSeq(pathref) ############################################################################################### def CreateSelector(self, dossier = True, btn_ouvrir = False): # Passage momentan� en Anglais (bug de wxpython) # locale2EN() # loc = wx.GetApp().locale.GetSystemLanguage() # wx.GetApp().locale = wx.Locale(wx.LANGUAGE_ENGLISH) sizer = wx.BoxSizer(wx.HORIZONTAL) bsize = (16*SSCALE, 16*SSCALE) # print(" ", self.lien.path) self.texte = wx.TextCtrl(self, -1, toSystemEncoding(self.lien.path), size = (-1, bsize[1])) if dossier: # bt1 =wx.BitmapButton(self, 100, wx.ArtProvider.GetBitmap(wx.ART_FOLDER, wx.ART_OTHER, bsize)) bt1 =wx.BitmapButton(self, 100, scaleImage(images.Icone_folder.GetBitmap(), *bsize)) bt1.SetToolTip("Sélectionner un dossier") self.Bind(wx.EVT_BUTTON, self.OnClick, bt1) self.bt1 = bt1 sizer.Add(bt1) # bt2 =wx.BitmapButton(self, 101, images.wx.ArtProvider.GetBitmap(wx.ART_NORMAL_FILE, wx.ART_OTHER, bsize)) bt2 =wx.BitmapButton(self, 101, scaleImage(images.Icone_fichier.GetBitmap(), *bsize)) bt2.SetToolTip("Sélectionner un fichier") self.Bind(wx.EVT_BUTTON, self.OnClick, bt2) self.Bind(wx.EVT_TEXT, self.EvtText, self.texte) self.bt2 = bt2 self.cb = wx.CheckBox(self, label='/', pos=(20, 20)) self.cb.SetToolTip("Cocher pour utiliser un chemin absolu") self.cb.Bind(wx.EVT_CHECKBOX, self.OnCbAbs, self.cb) sizer.Add(bt2) sizer.Add(self.cb, flag = wx.EXPAND) sizer.Add(self.texte, 1, flag = wx.EXPAND) if btn_ouvrir: self.btnlien = wx.BitmapButton(self, -1, scaleImage(images.Icone_open.GetBitmap(), *bsize)) self.btnlien.Show(self.lien.path != "") self.Bind(wx.EVT_BUTTON, self.OnClickLien, 
self.btnlien) sizer.Add(self.btnlien) # Pour drag&drop direct de fichiers !! (exp�rimental) file_drop_target = MyFileDropTarget(self) self.SetDropTarget(file_drop_target) # locale2def() # wx.GetApp().locale = wx.Locale(loc) return sizer ######################################################################################################### def sendEvent(self): # print("sendEvent", modif, draw, verif) evt = PathEvent(myEVT_PATH_MODIFIED, self.GetId()) evt.SetPath(self.lien) self.GetEventHandler().ProcessEvent(evt) ############################################################################################### def SetToolTipTexte(self): t = self.lien.getTexte() if self.lien.path == "": self.texte.SetToolTip("Saisir un nom de fichier/dossier ou un URL\nou faire glisser un fichier") elif self.lien.ok: self.texte.SetToolTip(self.lien.path) if hasattr(self, 'btnlien'): self.btnlien.SetToolTip("Ouvrir le %s" %t.le_()) else: self.texte.SetToolTip("Chemin non valide :\n"+self.lien.path) ############################################################################# def OnClickLien(self, event): self.lien.Afficher(self.pathref) ############################################################################################### # Overridden from ComboCtrl, called when the combo button is clicked def OnClick(self, event): if event.GetId() == 100: dlg = wx.DirDialog(self, "Sélectionner un dossier", style=wx.DD_DEFAULT_STYLE, defaultPath = toSystemEncoding(self.pathref) #| wx.DD_DIR_MUST_EXIST #| wx.DD_CHANGE_DIR ) if dlg.ShowModal() == wx.ID_OK: self.SetPath(dlg.GetPath(), 'd', marquerModifier = True) dlg.Destroy() else: dlg = wx.FileDialog(self, "Sélectionner un fichier", wildcard = self.ext, defaultDir = toSystemEncoding(self.pathref), # defaultPath = globdef.DOSSIER_EXEMPLES, style = wx.DD_DEFAULT_STYLE #| wx.DD_DIR_MUST_EXIST #| wx.DD_CHANGE_DIR ) if dlg.ShowModal() == wx.ID_OK: self.SetPath(dlg.GetPath(), 'f', marquerModifier = True) dlg.Destroy() self.MiseAJour() self.SetFocus() ############################################################################################### def OnCbAbs(self, event): box = event.GetEventObject() self.lien.abs = box.GetValue() self.lien.EvalLien(self.lien.path, self.pathref) self.SetPath(marquerModifier = True) event.Skip() ########################################################################################## def EvtText(self, event): # self.lien.EvalLien(event.GetString(), self.pathref) # if not self.lien.ok: # self.lien.EvalTypeLien(self.pathref) self.SetPath(event.GetString(), marquerModifier = True) ############################################################################################### def dropFiles(self, file_list): for path in file_list: self.SetPath(path, 'f', marquerModifier = True) return ############################################################################################### def Enable(self, etat): self.texte.Enable(etat) self.bt2.Enable(etat) if hasattr(self, "bt1"): self.bt1.Enable(etat) ########################################################################################## def SetPathSeq(self, pathref): self.pathref = pathref self.lien.EvalTypeLien(self.pathref) self.cb.SetValue(self.lien.abs) self.SetToolTipTexte() ########################################################################################## def SetPath(self, lien = None, typ = None, marquerModifier = False): """ lien doit être de type 'String' encodé en SYSTEM_ENCODING """ # print("SetPath", self.lien) # print " ", lien, typ if lien is not None: self.lien.setPath(lien) 
self.lien.EvalLien(lien, self.pathref) # print(" ", self.lien.path) try: self.texte.ChangeValue(self.lien.path) except: # Ca ne devrait pas arriver ... et pourtant �a arrive ! self.lien.path = self.lien.path.decode(FILE_ENCODING) # self.lien.path = self.lien.path.encode(SYSTEM_ENCODING) self.texte.ChangeValue(toSystemEncoding(self.lien.path)) # On le met en SYSTEM_ENCODING # print(" ", self.lien.ok) self.MiseAJour() if marquerModifier: self.sendEvent() ########################################################################################## def GetPath(self): return self.lien ############################################################################################### def MiseAJour(self): # self.btnlien.Show(self.lien.path != "") self.marquerValid() self.cb.SetValue(self.lien.abs) ########################################################################################## def marquerValid(self): if self.lien.ok: self.texte.SetBackgroundColour( wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOW)) else: self.texte.SetBackgroundColour("pink") self.texte.SetFocus() if hasattr(self, 'btnlien'): self.btnlien.Enable(self.lien.ok) self.SetToolTipTexte() self.Refresh() #################################################################################### # # Widget pour sélectionner un lien # #################################################################################### class URLSelectorCombo(URLSelectorBase): def __init__(self, parent, lien, pathref, dossier = True, ext = ""): """ lien : type Lien pathref : chemin du dossier de référence (pour chemins relatifs) dossier : bool pour spécifier que le lien est un dossier ext : """ # print "init URLSelectorCombo", pathref URLSelectorBase.__init__(self, parent, lien, pathref, dossier, btn_ouvrir = True, ext = ext) # ############################################################################################### # def CreateSelector(self): # # Passage momentan� en Anglais (bug de wxpython) # # locale2EN() # # loc = wx.GetApp().locale.GetSystemLanguage() # # wx.GetApp().locale = wx.Locale(wx.LANGUAGE_ENGLISH) # # sizer = wx.BoxSizer(wx.HORIZONTAL) # bsize = (16*SSCALE, 16*SSCALE) # # self.texte = wx.TextCtrl(self, -1, toSystemEncoding(self.lien.path), size = (-1, bsize[1])) # # # if self.dossier: # # bt1 =wx.BitmapButton(self, 100, wx.ArtProvider.GetBitmap(wx.ART_FOLDER, wx.ART_OTHER, bsize)) # bt1 =wx.BitmapButton(self, 100, scaleImage(images.Icone_folder.GetBitmap(), *bsize)) # bt1.SetToolTip("Sélectionner un dossier") # self.Bind(wx.EVT_BUTTON, self.OnClick, bt1) # self.bt1 = bt1 # sizer.Add(bt1) # # bt2 =wx.BitmapButton(self, 101, images.wx.ArtProvider.GetBitmap(wx.ART_NORMAL_FILE, wx.ART_OTHER, bsize)) # # bt2 =wx.BitmapButton(self, 101, scaleImage(images.Icone_fichier.GetBitmap(), *bsize)) # bt2.SetToolTip("Sélectionner un fichier") # self.Bind(wx.EVT_BUTTON, self.OnClick, bt2) # self.Bind(wx.EVT_TEXT, self.EvtText, self.texte) # self.bt2 = bt2 # # sizer.Add(bt2) # sizer.Add(self.texte,1,flag = wx.EXPAND) # # # self.btnlien = wx.BitmapButton(self, -1, wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN, wx.ART_OTHER, bsize)) # self.btnlien = wx.BitmapButton(self, -1, scaleImage(images.Icone_open.GetBitmap(), *bsize)) # self.btnlien.SetToolTip("Ouvrir le lien externe") # self.btnlien.Show(self.lien.path != "") # self.Bind(wx.EVT_BUTTON, self.OnClickLien, self.btnlien) # sizer.Add(self.btnlien) # # # # Pour drag&drop direct de fichiers !! 
(exp�rimental) # file_drop_target = MyFileDropTarget(self) # self.SetDropTarget(file_drop_target) # # # locale2def() # # wx.GetApp().locale = wx.Locale(loc) # # return sizer #################################################################################### # # Widget pour sélectionner un lien # #################################################################################### class URLSelector(URLSelectorBase): def __init__(self, parent, lien, pathref, dossier = True, ext = ""): """ lien : type Lien pathref : chemin du dossier de référence (pour chemins relatifs) dossier : bool pour spécifier que le lien est un dossier ext : """ # print "init URLSelectorCombo", pathref URLSelectorBase.__init__(self, parent, lien, pathref, dossier = False , btn_ouvrir = False, ext = ext) # ############################################################################################### # def CreateSelector(self): # # Passage momentan� en Anglais (bug de wxpython) # # locale2EN() # # loc = wx.GetApp().locale.GetSystemLanguage() # # wx.GetApp().locale = wx.Locale(wx.LANGUAGE_ENGLISH) # # sizer = wx.BoxSizer(wx.HORIZONTAL) # bsize = (16*SSCALE, 16*SSCALE) # # self.texte = wx.TextCtrl(self, -1, toSystemEncoding(self.lien.path), size = (-1, bsize[1])) # # # if self.dossier: # # bt1 =wx.BitmapButton(self, 100, wx.ArtProvider.GetBitmap(wx.ART_FOLDER, wx.ART_OTHER, bsize)) # bt1 =wx.BitmapButton(self, 100, scaleImage(images.Icone_folder.GetBitmap(), *bsize)) # bt1.SetToolTip("Sélectionner un dossier") # self.Bind(wx.EVT_BUTTON, self.OnClick, bt1) # self.bt1 = bt1 # sizer.Add(bt1) # # bt2 =wx.BitmapButton(self, 101, images.wx.ArtProvider.GetBitmap(wx.ART_NORMAL_FILE, wx.ART_OTHER, bsize)) # # bt2 =wx.BitmapButton(self, 101, scaleImage(images.Icone_fichier.GetBitmap(), *bsize)) # bt2.SetToolTip("Sélectionner un fichier") # self.Bind(wx.EVT_BUTTON, self.OnClick, bt2) # self.Bind(wx.EVT_TEXT, self.EvtText, self.texte) # self.bt2 = bt2 # # sizer.Add(bt2) # sizer.Add(self.texte,1,flag = wx.EXPAND) # # # self.cb = wx.CheckBox(self, label='abs', pos=(20, 20)) # self.cb.SetToolTip("Cocher pour utiliser un chemin de fichier absolu") # self.cb.Bind(wx.EVT_CHECKBOX, self.OnCbAbs, self.cb) # sizer.Add(self.cb,flag = wx.EXPAND) # # # # self.btnlien = wx.BitmapButton(self, -1, wx.ArtProvider.GetBitmap(wx.ART_FILE_OPEN, wx.ART_OTHER, bsize)) # # self.btnlien = wx.BitmapButton(self, -1, scaleImage(images.Icone_open.GetBitmap(), *bsize)) # # self.btnlien.SetToolTip("Ouvrir le lien externe") # # self.btnlien.Show(self.lien.path != "") # # self.Bind(wx.EVT_BUTTON, self.OnClickLien, self.btnlien) # # sizer.Add(self.btnlien) # # # # Pour drag&drop direct de fichiers !! 
(exp�rimental) # file_drop_target = MyFileDropTarget(self) # self.SetDropTarget(file_drop_target) # # # locale2def() # # wx.GetApp().locale = wx.Locale(loc) # # return sizer # # # ########################################################################################################## # # # # Dialogue de paramétrage du selecteur<|fim▁hole|># wx.Dialog.__init__(self, parent, -1, "Paramètres") # self.SetExtraStyle(wx.DIALOG_EX_CONTEXTHELP) # # sizer = wx.BoxSizer(wx.VERTICAL) # # label = wx.StaticText(self, -1, "Sélectionner un fichier, un dossier ou une URL") # label.SetHelpText("Sélectionner un fichier, un dossier ou une URL") # sizer.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 5) # # box = wx.BoxSizer(wx.HORIZONTAL) # # label = wx.StaticText(self, -1, "Lien :") # # label.SetHelpText("This is the help text for the label") # box.Add(label, 0, wx.ALIGN_CENTRE|wx.ALL, 5) # # url = URLSelectorCombo(self, lien, pathref) # # text.SetHelpText("Here's some help text for field #1") # box.Add(url, 1, wx.ALIGN_CENTRE|wx.ALL, 5) # self.url = url # # sizer.Add(box, 0, wx.GROW|wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5) # # line = wx.StaticLine(self, -1, size=(20,-1), style=wx.LI_HORIZONTAL) # sizer.Add(line, 0, wx.GROW|wx.ALIGN_CENTER_VERTICAL|wx.RIGHT|wx.TOP, 5) # # btnsizer = wx.StdDialogButtonSizer() # # if wx.Platform != "__WXMSW__": # btn = wx.ContextHelpButton(self) # btnsizer.AddButton(btn) # # btn = wx.Button(self, wx.ID_OK) # btn.SetHelpText("Valider") # btn.SetDefault() # btnsizer.AddButton(btn) # # btn = wx.Button(self, wx.ID_CANCEL) # btn.SetHelpText("Annuler") # btnsizer.AddButton(btn) # btnsizer.Realize() # # sizer.Add(btnsizer, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5) # # self.SetSizer(sizer) # sizer.Fit(self) # # # ###################################################################################### # def GetURL(self): # return self.url.GetPath() # # # ###################################################################################### # def OnPathModified(self, lien): # return # #<|fim▁end|>
# # # ########################################################################################################## # class URLParamDialog(wx.Dialog): # def __init__(self, parent, lien, pathref):
<|file_name|>test_settings.py<|end_file_name|><|fim▁begin|>from django.test import override_settings, SimpleTestCase from arcutils.settings import NO_DEFAULT, PrefixedSettings, get_setting @override_settings(ARC={ 'a': 'a', 'b': [0, 1], 'c': [{'c': 'c'}], 'd': 'd', }) class TestGetSettings(SimpleTestCase): def get_setting(self, key, default=NO_DEFAULT): return get_setting(key, default=default) def test_can_traverse_into_dict(self): self.assertEqual(self.get_setting('ARC.a'), 'a') def test_can_traverse_into_dict_then_list(self): self.assertEqual(self.get_setting('ARC.b.0'), 0) def test_can_traverse_into_list_then_dict(self): self.assertEqual(self.get_setting('ARC.c.0.c'), 'c') def test_returns_default_for_non_existent_root(self): default = object() self.assertIs(self.get_setting('NOPE', default), default) def test_returns_default_for_non_existent_nested_setting(self): default = object() self.assertIs(self.get_setting('ARC.nope', default), default) def test_raises_when_not_found_and_no_default(self): self.assertRaises(KeyError, self.get_setting, 'NOPE') def test_can_traverse_into_string_setting(self): self.assertEqual(self.get_setting('ARC.d.0'), 'd') def test_bad_index_causes_type_error(self): self.assertRaises(TypeError, self.get_setting, 'ARC.b.nope') @override_settings(CAS={ 'extra': 'extra', 'overridden': 'overridden', }) class TestGetPrefixedSettings(SimpleTestCase): def setUp(self): super().setUp() defaults = { 'base_url': 'http://example.com/cas/', 'parent': { 'child': 'child', }, 'overridden': 'default', } self.settings = PrefixedSettings('CAS', defaults) def test_get_from_defaults(self): self.assertEqual(self.settings.get('base_url'), 'http://example.com/cas/') def test_get_nested_from_defaults(self): self.assertEqual(self.settings.get('parent.child'), 'child') def test_get_from_project_settings(self): self.assertEqual(self.settings.get('extra'), 'extra') def test_get_setting_overridden_in_project_settings(self): self.assertEqual(self.settings.get('overridden'), 'overridden') def test_defaults_trump_passed_default(self):<|fim▁hole|> self.assertEqual( self.settings.get('base_url', 'http://example.com/other/'), 'http://example.com/cas/') def test_passed_default_does_not_trump_project_setting(self): self.assertEqual(self.settings.get('extra', 'default'), 'extra') def test_get_default_for_nonexistent(self): self.assertEqual(self.settings.get('pants', 'jeans'), 'jeans')<|fim▁end|>
<|file_name|>feature-gate-log_syntax2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // gate-test-log_syntax <|fim▁hole|>fn main() { println!("{}", log_syntax!()); //~ ERROR `log_syntax!` is not stable }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import unittest import tempfile from jsonconfigparser import JSONConfigParser, NoSectionError, ParseError class JSONConfigTestCase(unittest.TestCase): def test_init(self): JSONConfigParser() def test_read_string(self): cf = JSONConfigParser() cf.read_string(( '[section]\n' '# comment comment\n' 'foo = "bar"\n' '\n' '[section2]\n' 'bar = "baz"\n' )) self.assertEqual(cf.get('section', 'foo'), 'bar') def test_read_file(self): string = '[section]\n' + \ 'foo = "bar"' fp = tempfile.NamedTemporaryFile('w+') fp.write(string) fp.seek(0) cf = JSONConfigParser() cf.read_file(fp) self.assertEqual(cf.get('section', 'foo'), 'bar') def test_get(self): cf = JSONConfigParser() cf.add_section('section') cf.set('section', 'section', 'set-in-section') self.assertEqual(cf.get('section', 'section'), 'set-in-section') def test_get_from_defaults(self): cf = JSONConfigParser() cf.set(cf.default_section, 'option', 'set-in-defaults') try: cf.get('section', 'option') except NoSectionError: pass else: # pragma: no cover self.fail("Only fall back to defaults if section exists") cf.add_section('section') self.assertEqual(cf.get('section', 'option'), 'set-in-defaults', msg="get should fall back to defaults if value not \ set in section") cf.set('section', 'option', 'set-normally') self.assertEqual(cf.get('section', 'option'), 'set-normally', msg="get shouldn't fall back if option is set \ normally") def test_get_from_vars(self): cf = JSONConfigParser() cf.add_section('section') cf.set('section', 'option', 'set-in-section') self.assertEqual(cf.get('section', 'option', vars={'option': 'set-in-vars'}), 'set-in-vars', msg="vars should take priority over options in \ section") self.assertEqual(cf.get('section', 'option', vars={}), 'set-in-section', msg="get should fall back to section if option not \ in vars") def test_get_from_fallback(self): cf = JSONConfigParser() cf.add_section('section') # returns from fallback if section exists self.assertEqual(cf.get('section', 'unset', 'fallback'), 'fallback') try: cf.get('nosection', 'unset', 'fallback') except NoSectionError: pass else: # pragma: no cover self.fail() <|fim▁hole|> # option in nonexistant section does not exist self.assertFalse(cf.has_option('nonexistant', 'unset')) cf.add_section('section') self.assertFalse(cf.has_option('section', 'unset'), msg="has_option should return False if section \ exists but option is unset") cf.set('section', 'set', 'set-normally') self.assertTrue(cf.has_option('section', 'set'), msg="has option should return True if option is set \ normally") cf.set(cf.default_section, 'default', 'set-in-defaults') self.assertTrue(cf.has_option('section', 'default'), msg="has_option should return True if option set in \ defaults") def test_remove_option(self): cf = JSONConfigParser() cf.add_section('section') cf.set('section', 'normal', 'set-normally') cf.set(cf.default_section, 'default', 'set-in-defaults') # can remove normal options self.assertTrue(cf.remove_option('section', 'normal')) self.assertFalse(cf.has_option('section', 'normal')) # can't remove defaults accidentally (maybe there should be shadowing) self.assertFalse(cf.remove_option('section', 'default')) self.assertEqual(cf.get('section', 'default'), 'set-in-defaults') def test_invalid_section(self): cf = JSONConfigParser() try: cf.read_string(( '[valid]\n' 'irrelevant = "meh"\n' '[]' )) except ParseError as e: self.assertEqual(e.lineno, 3) # check that nothing was added self.assertEqual(sum(1 for _ in cf.sections()), 0) else: # pragma: no 
cover self.fail() try: cf.read_string(( '[nooooooooooooooooooo' )) except ParseError as e: self.assertEqual(e.lineno, 1) # check that nothing was added self.assertEqual(sum(1 for _ in cf.sections()), 0) else: # pragma: no cover self.fail() def test_invalid_values(self): cf = JSONConfigParser() try: cf.read_string(( '[section]\n' 'unmatched = [1,2,3}' )) except ParseError as e: self.assertEqual(e.lineno, 2) # check that nothing was added self.assertEqual(sum(1 for _ in cf.sections()), 0) else: # pragma: no cover self.fail() try: cf.read_string(( '[section]\n' 'unterminated = "something\n' )) except ParseError as e: self.assertEqual(e.lineno, 2) # check that nothing was added self.assertEqual(sum(1 for _ in cf.sections()), 0) else: # pragma: no cover self.fail() suite = unittest.TestLoader().loadTestsFromTestCase(JSONConfigTestCase)<|fim▁end|>
def test_has_option(self): cf = JSONConfigParser()
<|file_name|>component_access_token.py<|end_file_name|><|fim▁begin|>import redis from app.config import get_config_obj from app.util.httputil import Http_util class Component_access_token(): def __init__(self): self.component_appid = get_config_obj().component_appid self.component_appsecret = get_config_obj().component_secret self.r = redis.Redis(host='localhost', port=6379, db=0) def get_component_verify_ticket(self):<|fim▁hole|> def get_commponent_access_token(self): token_json_data = Http_util().post_get_component_access_token(self.get_component_verify_ticket()) # TODO save return token_json_data.get("component_access_token")<|fim▁end|>
# TODO read the saved ticket component_verify_ticket = self.r.get('component_verify_ticket') return component_verify_ticket
<|file_name|>purge.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 Matthieu Huguet <|fim▁hole|># Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import os import sys from .types import BackupCollection, Backup def run(config): backups = get_backup_collection(config.backup_dir) days = backups.days() if not days: return 0 days_to_keep = get_days_to_keep(days, config) if days_to_keep or config.force: backups_to_remove = backups.except_days(set(days_to_keep)) backups_to_remove.remove_all(config.noop) return 0 else: sys.stderr.write(""" WARNING : With the specified retention rules, all the files in the specified directory will be deleted. If you only specified -m and / or -w, it means that there is no file in the directory that match your retention rules. Please look at --day-of-week or --day-of-month options. If you really know what you are doing, you can use option --force to remove all your backup files according to your retention rules. """) return 1 def get_backup_collection(backup_dir): daily_backups = BackupCollection() for file in os.listdir(backup_dir): fpath = os.path.join(backup_dir, file) if not os.path.islink(fpath) and os.path.isfile(fpath): backup = Backup.from_path(fpath) daily_backups.add(backup) return daily_backups def get_days_to_keep(days, config): days_to_keep = daily_backup_days(days, config.days_retention) days_to_keep += weekly_backup_days( days, config.dow, config.weeks_retention) days_to_keep += monthly_backup_days( days, config.dom, config.months_retention) return days_to_keep def daily_backup_days(days, retention): return days[:retention] def weekly_backup_days(days, dow, retention): weekly_days = [day for day in days if day.isoweekday() == dow] return weekly_days[:retention] def monthly_backup_days(days, dom, retention): monthly_days = [day for day in days if day.day == dom] return monthly_days[:retention]<|fim▁end|>
<|file_name|>quaternion_coordinates.rs<|end_file_name|><|fim▁begin|>use std::mem; use std::ops::{Deref, DerefMut}; use simba::simd::SimdValue; use crate::base::coordinates::IJKW; use crate::Scalar; use crate::geometry::Quaternion; impl<N: Scalar + SimdValue> Deref for Quaternion<N> { type Target = IJKW<N>;<|fim▁hole|> fn deref(&self) -> &Self::Target { unsafe { mem::transmute(self) } } } impl<N: Scalar + SimdValue> DerefMut for Quaternion<N> { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { unsafe { mem::transmute(self) } } }<|fim▁end|>
#[inline]
<|file_name|>test_issue_20.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ####################################################################### # Name: test_optional_in_choice # Purpose: Optional matches always succeds but should not stop alternative # probing on failed match. # Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com><|fim▁hole|># License: MIT License ####################################################################### from __future__ import unicode_literals # Grammar from arpeggio import ParserPython, Optional, EOF def g(): return [Optional('first'), Optional('second'), Optional('third')], EOF def test_optional_in_choice(): parser = ParserPython(g) input_str = "second" parse_tree = parser.parse(input_str) assert parse_tree is not None<|fim▁end|>
# Copyright: (c) 2015 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
<|file_name|>GameClient.ts<|end_file_name|><|fim▁begin|>export namespace GameClient { export function initGame(socket: WebSocket) { socket.onopen = () => { console.log("Connected to server"); }; socket.onmessage = (message: MessageEvent) => { console.log(message.data); }; socket.onclose = () => {<|fim▁hole|> } }<|fim▁end|>
alert("Connection closed"); };
<|file_name|>package.py<|end_file_name|><|fim▁begin|>############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class PyNbformat(PythonPackage): """The Jupyter Notebook format""" <|fim▁hole|> version('4.1.0', '826b4fc4ec42553b20144f53b57b4e7b') version('4.0.1', 'ab7172e517c9d561c0c01eef5631b4c8') version('4.0.0', '7cf61359fa4e9cf3ef5e969e2fcb909e') depends_on('py-ipython-genutils', type=('build', 'run')) depends_on('py-traitlets', type=('build', 'run')) depends_on('py-jsonschema', type=('build', 'run')) depends_on('py-jupyter-core', type=('build', 'run'))<|fim▁end|>
homepage = "https://github.com/jupyter/nbformat" url = "https://github.com/jupyter/nbformat/archive/4.1.0.tar.gz"
<|file_name|>issue-46553.rs<|end_file_name|><|fim▁begin|>// run-pass #![feature(const_fn_fn_ptr_basics)] #![deny(const_err)] pub struct Data<T> { function: fn() -> T, } impl<T> Data<T> { pub const fn new(function: fn() -> T) -> Data<T> { Data { function: function, } } } pub static DATA: Data<i32> = Data::new(|| { 413i32<|fim▁hole|>}); fn main() { print!("{:?}", (DATA.function)()); }<|fim▁end|>
<|file_name|>OrderApp_order.graphql.ts<|end_file_name|><|fim▁begin|>/* tslint:disable */ /* eslint-disable */ import { ReaderFragment } from "relay-runtime"; import { FragmentRefs } from "relay-runtime"; export type CommerceOrderModeEnum = "BUY" | "OFFER" | "%future added value"; export type OrderApp_order = { readonly mode: CommerceOrderModeEnum | null; readonly lineItems: { readonly edges: ReadonlyArray<{ readonly node: { readonly artwork: { readonly href: string | null; readonly slug: string; readonly is_acquireable: boolean | null; readonly is_offerable: boolean | null; } | null; } | null; } | null> | null; } | null; readonly " $refType": "OrderApp_order"; }; export type OrderApp_order$data = OrderApp_order; export type OrderApp_order$key = { readonly " $data"?: OrderApp_order$data; readonly " $fragmentRefs": FragmentRefs<"OrderApp_order">; }; const node: ReaderFragment = { "argumentDefinitions": [], "kind": "Fragment", "metadata": null, "name": "OrderApp_order", "selections": [ { "alias": null, "args": null, "kind": "ScalarField", "name": "mode", "storageKey": null }, { "alias": null, "args": null,<|fim▁hole|> "concreteType": "CommerceLineItemConnection", "kind": "LinkedField", "name": "lineItems", "plural": false, "selections": [ { "alias": null, "args": null, "concreteType": "CommerceLineItemEdge", "kind": "LinkedField", "name": "edges", "plural": true, "selections": [ { "alias": null, "args": null, "concreteType": "CommerceLineItem", "kind": "LinkedField", "name": "node", "plural": false, "selections": [ { "alias": null, "args": null, "concreteType": "Artwork", "kind": "LinkedField", "name": "artwork", "plural": false, "selections": [ { "alias": null, "args": null, "kind": "ScalarField", "name": "href", "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "slug", "storageKey": null }, { "alias": "is_acquireable", "args": null, "kind": "ScalarField", "name": "isAcquireable", "storageKey": null }, { "alias": "is_offerable", "args": null, "kind": "ScalarField", "name": "isOfferable", "storageKey": null } ], "storageKey": null } ], "storageKey": null } ], "storageKey": null } ], "storageKey": null } ], "type": "CommerceOrder" }; (node as any).hash = '22f0547ca97d2ba0d33dbc5db1aa4c77'; export default node;<|fim▁end|>
<|file_name|>scope.ts<|end_file_name|><|fim▁begin|>import { hideProperty } from '../util'; export const scopeKey = '__scope__'; export class ScopeData { public $state: any = {}; public $getters: any = {}; public static get(ctx: any): ScopeData { return ctx[scopeKey] || (function () {<|fim▁hole|> } }<|fim▁end|>
const scope = new ScopeData(); hideProperty(ctx, scopeKey, scope); return scope; })();
<|file_name|>Command.cpp<|end_file_name|><|fim▁begin|>/*************** <auto-copyright.pl BEGIN do not edit this line> ************** * * VR Juggler is (C) Copyright 1998-2007 by Iowa State University * * Original Authors: * Allen Bierbaum, Christopher Just, * Patrick Hartling, Kevin Meinert, * Carolina Cruz-Neira, Albert Baker * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. * *************** <auto-copyright.pl END do not edit this line> ***************/ #include <gadget/gadgetConfig.h> #include <boost/concept_check.hpp> #include <vpr/Util/Debug.h> #include <vpr/IO/ObjectWriter.h> #include <vpr/IO/ObjectReader.h> #include <gadget/Type/Command.h> namespace gadget { const CommandData Command::getCommandData(int devNum) { SampleBuffer_t::buffer_t& stable_buffer = mCommandSamples.stableBuffer(); if ( (!stable_buffer.empty()) && (stable_buffer.back().size() > (unsigned)devNum) ) // If Have entry && devNum in range { return stable_buffer.back()[devNum]; } else // No data or request out of range, return default value { if ( stable_buffer.empty() ) { vprDEBUG(vprDBG_ALL, vprDBG_WARNING_LVL) << "WARNING: [gadget::Command::getCommandData()] " << "Stable buffer is empty. If this is not the first " << "read, then this is a problem.\n" << vprDEBUG_FLUSH; } else { vprDEBUG(vprDBG_ALL, vprDBG_CONFIG_LVL) << "WARNING: [gadget::Command::getCommandData()] " << "Requested devNum (" << devNum << ") is not in the range available. " << "This is probably a configuration error.\n" << vprDEBUG_FLUSH; } return mDefaultValue; } } void Command::writeObject(vpr::ObjectWriter* writer) { writer->beginTag(Command::getInputTypeName()); SampleBuffer_t::buffer_t& stable_buffer = mCommandSamples.stableBuffer(); writer->beginAttribute(gadget::tokens::DataTypeAttrib); // Write out the data type so that we can assert if reading in wrong // place. 
writer->writeUint16(MSG_DATA_COMMAND); writer->endAttribute();<|fim▁hole|> writer->endAttribute(); if ( !stable_buffer.empty() ) { mCommandSamples.lock(); for ( unsigned j = 0; j < stable_buffer.size(); ++j ) // For each vector in the stable buffer { writer->beginTag(gadget::tokens::BufferSampleTag); writer->beginAttribute(gadget::tokens::BufferSampleLenAttrib); writer->writeUint16(stable_buffer[j].size()); // Write the # of CommandDatas in the vector writer->endAttribute(); for ( unsigned i = 0; i < stable_buffer[j].size(); ++i ) // For each CommandData in the vector { writer->beginTag(gadget::tokens::DigitalValue); writer->beginAttribute(gadget::tokens::TimeStamp); writer->writeUint64(stable_buffer[j][i].getTime().usec()); // Write Time Stamp vpr::Uint64 writer->endAttribute(); writer->writeUint32((vpr::Uint32)stable_buffer[j][i].getDigital()); // Write Command Data(int) writer->endTag(); } writer->endTag(); } mCommandSamples.unlock(); } writer->endTag(); } void Command::readObject(vpr::ObjectReader* reader) { vprASSERT(reader->attribExists("rim.timestamp.delta")); vpr::Uint64 delta = reader->getAttrib<vpr::Uint64>("rim.timestamp.delta"); // ASSERT if this data is really not Command Data reader->beginTag(Command::getInputTypeName()); reader->beginAttribute(gadget::tokens::DataTypeAttrib); vpr::Uint16 temp = reader->readUint16(); reader->endAttribute(); // XXX: Should there be error checking for the case when vprASSERT() // is compiled out? -PH 8/21/2003 vprASSERT(temp==MSG_DATA_COMMAND && "[Remote Input Manager]Not Command Data"); boost::ignore_unused_variable_warning(temp); std::vector<CommandData> dataSample; unsigned numCommandDatas; vpr::Uint32 value; vpr::Uint64 timeStamp; CommandData temp_command_data; reader->beginAttribute(gadget::tokens::SampleBufferLenAttrib); unsigned numVectors = reader->readUint16(); reader->endAttribute(); mCommandSamples.lock(); for ( unsigned i = 0; i < numVectors; ++i ) { reader->beginTag(gadget::tokens::BufferSampleTag); reader->beginAttribute(gadget::tokens::BufferSampleLenAttrib); numCommandDatas = reader->readUint16(); reader->endAttribute(); dataSample.clear(); for ( unsigned j = 0; j < numCommandDatas; ++j ) { reader->beginTag(gadget::tokens::DigitalValue); reader->beginAttribute(gadget::tokens::TimeStamp); timeStamp = reader->readUint64(); // read Time Stamp vpr::Uint64 reader->endAttribute(); value = reader->readUint32(); // read Command Data(int) reader->endTag(); temp_command_data.setDigital(value); temp_command_data.setTime(vpr::Interval(timeStamp + delta,vpr::Interval::Usec)); dataSample.push_back(temp_command_data); } mCommandSamples.addSample(dataSample); reader->endTag(); } mCommandSamples.unlock(); mCommandSamples.swapBuffers(); reader->endTag(); } } // End of gadget namespace<|fim▁end|>
writer->beginAttribute(gadget::tokens::SampleBufferLenAttrib); // Write the # of vectors in the stable buffer. writer->writeUint16(stable_buffer.size());
<|file_name|>test_array_from_pyobj.py<|end_file_name|><|fim▁begin|>from __future__ import division, absolute_import, print_function import unittest import os import sys import copy from numpy import ( array, alltrue, ndarray, zeros, dtype, intp, clongdouble ) from numpy.testing import ( run_module_suite, assert_, assert_equal, SkipTest ) from numpy.core.multiarray import typeinfo import util wrap = None def setup(): """ Build the required testing extension module """ global wrap # Check compiler availability first if not util.has_c_compiler(): raise SkipTest("No C compiler available") if wrap is None: config_code = """ config.add_extension('test_array_from_pyobj_ext', sources=['wrapmodule.c', 'fortranobject.c'], define_macros=[]) """ d = os.path.dirname(__file__) src = [os.path.join(d, 'src', 'array_from_pyobj', 'wrapmodule.c'), os.path.join(d, '..', 'src', 'fortranobject.c'), os.path.join(d, '..', 'src', 'fortranobject.h')] wrap = util.build_module_distutils(src, config_code, 'test_array_from_pyobj_ext') def flags_info(arr): flags = wrap.array_attrs(arr)[6] return flags2names(flags) def flags2names(flags): info = [] for flagname in ['CONTIGUOUS', 'FORTRAN', 'OWNDATA', 'ENSURECOPY', 'ENSUREARRAY', 'ALIGNED', 'NOTSWAPPED', 'WRITEABLE', 'UPDATEIFCOPY', 'BEHAVED', 'BEHAVED_RO', 'CARRAY', 'FARRAY' ]: if abs(flags) & getattr(wrap, flagname, 0): info.append(flagname) return info class Intent(object): def __init__(self, intent_list=[]): self.intent_list = intent_list[:] flags = 0 for i in intent_list: if i == 'optional': flags |= wrap.F2PY_OPTIONAL else: flags |= getattr(wrap, 'F2PY_INTENT_' + i.upper()) self.flags = flags def __getattr__(self, name): name = name.lower() if name == 'in_': name = 'in' return self.__class__(self.intent_list + [name]) def __str__(self): return 'intent(%s)' % (','.join(self.intent_list)) def __repr__(self): return 'Intent(%r)' % (self.intent_list) def is_intent(self, *names): for name in names: if name not in self.intent_list: return False return True def is_intent_exact(self, *names): return len(self.intent_list) == len(names) and self.is_intent(*names) intent = Intent() _type_names = ['BOOL', 'BYTE', 'UBYTE', 'SHORT', 'USHORT', 'INT', 'UINT', 'LONG', 'ULONG', 'LONGLONG', 'ULONGLONG', 'FLOAT', 'DOUBLE', 'CFLOAT'] _cast_dict = {'BOOL': ['BOOL']} _cast_dict['BYTE'] = _cast_dict['BOOL'] + ['BYTE'] _cast_dict['UBYTE'] = _cast_dict['BOOL'] + ['UBYTE'] _cast_dict['BYTE'] = ['BYTE'] _cast_dict['UBYTE'] = ['UBYTE'] _cast_dict['SHORT'] = _cast_dict['BYTE'] + ['UBYTE', 'SHORT'] _cast_dict['USHORT'] = _cast_dict['UBYTE'] + ['BYTE', 'USHORT'] _cast_dict['INT'] = _cast_dict['SHORT'] + ['USHORT', 'INT'] _cast_dict['UINT'] = _cast_dict['USHORT'] + ['SHORT', 'UINT'] _cast_dict['LONG'] = _cast_dict['INT'] + ['LONG'] _cast_dict['ULONG'] = _cast_dict['UINT'] + ['ULONG'] _cast_dict['LONGLONG'] = _cast_dict['LONG'] + ['LONGLONG'] _cast_dict['ULONGLONG'] = _cast_dict['ULONG'] + ['ULONGLONG'] _cast_dict['FLOAT'] = _cast_dict['SHORT'] + ['USHORT', 'FLOAT'] _cast_dict['DOUBLE'] = _cast_dict['INT'] + ['UINT', 'FLOAT', 'DOUBLE'] _cast_dict['CFLOAT'] = _cast_dict['FLOAT'] + ['CFLOAT'] # 32 bit system malloc typically does not provide the alignment required by # 16 byte long double types this means the inout intent cannot be satisfied # and several tests fail as the alignment flag can be randomly true or fals # when numpy gains an aligned allocator the tests could be enabled again if ((intp().dtype.itemsize != 4 or clongdouble().dtype.alignment <= 8) and sys.platform != 'win32'): 
_type_names.extend(['LONGDOUBLE', 'CDOUBLE', 'CLONGDOUBLE']) _cast_dict['LONGDOUBLE'] = _cast_dict['LONG'] + \ ['ULONG', 'FLOAT', 'DOUBLE', 'LONGDOUBLE'] _cast_dict['CLONGDOUBLE'] = _cast_dict['LONGDOUBLE'] + \ ['CFLOAT', 'CDOUBLE', 'CLONGDOUBLE'] _cast_dict['CDOUBLE'] = _cast_dict['DOUBLE'] + ['CFLOAT', 'CDOUBLE'] class Type(object): _type_cache = {} def __new__(cls, name): if isinstance(name, dtype): dtype0 = name name = None for n, i in typeinfo.items(): if isinstance(i, tuple) and dtype0.type is i[-1]: name = n break obj = cls._type_cache.get(name.upper(), None) if obj is not None: return obj obj = object.__new__(cls) obj._init(name) cls._type_cache[name.upper()] = obj return obj def _init(self, name): self.NAME = name.upper() self.type_num = getattr(wrap, 'NPY_' + self.NAME) assert_equal(self.type_num, typeinfo[self.NAME][1]) self.dtype = typeinfo[self.NAME][-1] self.elsize = typeinfo[self.NAME][2] / 8 self.dtypechar = typeinfo[self.NAME][0] def cast_types(self): return [self.__class__(_m) for _m in _cast_dict[self.NAME]] def all_types(self): return [self.__class__(_m) for _m in _type_names] def smaller_types(self): bits = typeinfo[self.NAME][3] types = [] for name in _type_names: if typeinfo[name][3] < bits: types.append(Type(name)) return types def equal_types(self): bits = typeinfo[self.NAME][3] types = [] for name in _type_names: if name == self.NAME: continue if typeinfo[name][3] == bits: types.append(Type(name)) return types def larger_types(self): bits = typeinfo[self.NAME][3] types = [] for name in _type_names: if typeinfo[name][3] > bits: types.append(Type(name)) return types class Array(object): def __init__(self, typ, dims, intent, obj): self.type = typ self.dims = dims self.intent = intent self.obj_copy = copy.deepcopy(obj) self.obj = obj # arr.dtypechar may be different from typ.dtypechar self.arr = wrap.call(typ.type_num, dims, intent.flags, obj) assert_(isinstance(self.arr, ndarray), repr(type(self.arr))) self.arr_attr = wrap.array_attrs(self.arr) if len(dims) > 1: if self.intent.is_intent('c'): assert_(intent.flags & wrap.F2PY_INTENT_C) assert_(not self.arr.flags['FORTRAN'], repr((self.arr.flags, getattr(obj, 'flags', None)))) assert_(self.arr.flags['CONTIGUOUS']) assert_(not self.arr_attr[6] & wrap.FORTRAN) else: assert_(not intent.flags & wrap.F2PY_INTENT_C) assert_(self.arr.flags['FORTRAN']) assert_(not self.arr.flags['CONTIGUOUS']) assert_(self.arr_attr[6] & wrap.FORTRAN) if obj is None: self.pyarr = None self.pyarr_attr = None return if intent.is_intent('cache'): assert_(isinstance(obj, ndarray), repr(type(obj))) self.pyarr = array(obj).reshape(*dims).copy() else: self.pyarr = array(array(obj, dtype=typ.dtypechar).reshape(*dims), order=self.intent.is_intent('c') and 'C' or 'F') assert_(self.pyarr.dtype == typ, repr((self.pyarr.dtype, typ))) assert_(self.pyarr.flags['OWNDATA'], (obj, intent)) self.pyarr_attr = wrap.array_attrs(self.pyarr) if len(dims) > 1: if self.intent.is_intent('c'): assert_(not self.pyarr.flags['FORTRAN']) assert_(self.pyarr.flags['CONTIGUOUS']) assert_(not self.pyarr_attr[6] & wrap.FORTRAN) else: assert_(self.pyarr.flags['FORTRAN']) assert_(not self.pyarr.flags['CONTIGUOUS']) assert_(self.pyarr_attr[6] & wrap.FORTRAN) assert_(self.arr_attr[1] == self.pyarr_attr[1]) # nd assert_(self.arr_attr[2] == self.pyarr_attr[2]) # dimensions if self.arr_attr[1] <= 1: assert_(self.arr_attr[3] == self.pyarr_attr[3], repr((self.arr_attr[3], self.pyarr_attr[3], self.arr.tobytes(), self.pyarr.tobytes()))) # strides assert_(self.arr_attr[5][-2:] == 
self.pyarr_attr[5][-2:], repr((self.arr_attr[5], self.pyarr_attr[5]))) # descr assert_(self.arr_attr[6] == self.pyarr_attr[6], repr((self.arr_attr[6], self.pyarr_attr[6], flags2names(0 * self.arr_attr[6] - self.pyarr_attr[6]), flags2names(self.arr_attr[6]), intent))) # flags if intent.is_intent('cache'): assert_(self.arr_attr[5][3] >= self.type.elsize, repr((self.arr_attr[5][3], self.type.elsize))) else: assert_(self.arr_attr[5][3] == self.type.elsize, repr((self.arr_attr[5][3], self.type.elsize))) assert_(self.arr_equal(self.pyarr, self.arr)) if isinstance(self.obj, ndarray): if typ.elsize == Type(obj.dtype).elsize: if not intent.is_intent('copy') and self.arr_attr[1] <= 1: assert_(self.has_shared_memory()) def arr_equal(self, arr1, arr2): if arr1.shape != arr2.shape: return False s = arr1 == arr2 return alltrue(s.flatten()) def __str__(self): return str(self.arr) def has_shared_memory(self): """Check that created array shares data with input array. """ if self.obj is self.arr: return True if not isinstance(self.obj, ndarray): return False obj_attr = wrap.array_attrs(self.obj) return obj_attr[0] == self.arr_attr[0] class test_intent(unittest.TestCase): def test_in_out(self): assert_equal(str(intent.in_.out), 'intent(in,out)') assert_(intent.in_.c.is_intent('c')) assert_(not intent.in_.c.is_intent_exact('c')) assert_(intent.in_.c.is_intent_exact('c', 'in')) assert_(intent.in_.c.is_intent_exact('in', 'c')) assert_(not intent.in_.is_intent('c')) class _test_shared_memory: num2seq = [1, 2] num23seq = [[1, 2, 3], [4, 5, 6]] def test_in_from_2seq(self): a = self.array([2], intent.in_, self.num2seq) assert_(not a.has_shared_memory()) def test_in_from_2casttype(self): for t in self.type.cast_types(): obj = array(self.num2seq, dtype=t.dtype) a = self.array([len(self.num2seq)], intent.in_, obj) if t.elsize == self.type.elsize: assert_( a.has_shared_memory(), repr((self.type.dtype, t.dtype))) else: assert_(not a.has_shared_memory(), repr(t.dtype)) def test_inout_2seq(self): obj = array(self.num2seq, dtype=self.type.dtype) a = self.array([len(self.num2seq)], intent.inout, obj) assert_(a.has_shared_memory()) try: a = self.array([2], intent.in_.inout, self.num2seq) except TypeError as msg: if not str(msg).startswith('failed to initialize intent' '(inout|inplace|cache) array'): raise else: raise SystemError('intent(inout) should have failed on sequence') def test_f_inout_23seq(self): obj = array(self.num23seq, dtype=self.type.dtype, order='F') shape = (len(self.num23seq), len(self.num23seq[0])) a = self.array(shape, intent.in_.inout, obj) assert_(a.has_shared_memory()) <|fim▁hole|> a = self.array(shape, intent.in_.inout, obj) except ValueError as msg: if not str(msg).startswith('failed to initialize intent' '(inout) array'): raise else: raise SystemError( 'intent(inout) should have failed on improper array') def test_c_inout_23seq(self): obj = array(self.num23seq, dtype=self.type.dtype) shape = (len(self.num23seq), len(self.num23seq[0])) a = self.array(shape, intent.in_.c.inout, obj) assert_(a.has_shared_memory()) def test_in_copy_from_2casttype(self): for t in self.type.cast_types(): obj = array(self.num2seq, dtype=t.dtype) a = self.array([len(self.num2seq)], intent.in_.copy, obj) assert_(not a.has_shared_memory(), repr(t.dtype)) def test_c_in_from_23seq(self): a = self.array([len(self.num23seq), len(self.num23seq[0])], intent.in_, self.num23seq) assert_(not a.has_shared_memory()) def test_in_from_23casttype(self): for t in self.type.cast_types(): obj = array(self.num23seq, dtype=t.dtype) a = 
self.array([len(self.num23seq), len(self.num23seq[0])], intent.in_, obj) assert_(not a.has_shared_memory(), repr(t.dtype)) def test_f_in_from_23casttype(self): for t in self.type.cast_types(): obj = array(self.num23seq, dtype=t.dtype, order='F') a = self.array([len(self.num23seq), len(self.num23seq[0])], intent.in_, obj) if t.elsize == self.type.elsize: assert_(a.has_shared_memory(), repr(t.dtype)) else: assert_(not a.has_shared_memory(), repr(t.dtype)) def test_c_in_from_23casttype(self): for t in self.type.cast_types(): obj = array(self.num23seq, dtype=t.dtype) a = self.array([len(self.num23seq), len(self.num23seq[0])], intent.in_.c, obj) if t.elsize == self.type.elsize: assert_(a.has_shared_memory(), repr(t.dtype)) else: assert_(not a.has_shared_memory(), repr(t.dtype)) def test_f_copy_in_from_23casttype(self): for t in self.type.cast_types(): obj = array(self.num23seq, dtype=t.dtype, order='F') a = self.array([len(self.num23seq), len(self.num23seq[0])], intent.in_.copy, obj) assert_(not a.has_shared_memory(), repr(t.dtype)) def test_c_copy_in_from_23casttype(self): for t in self.type.cast_types(): obj = array(self.num23seq, dtype=t.dtype) a = self.array([len(self.num23seq), len(self.num23seq[0])], intent.in_.c.copy, obj) assert_(not a.has_shared_memory(), repr(t.dtype)) def test_in_cache_from_2casttype(self): for t in self.type.all_types(): if t.elsize != self.type.elsize: continue obj = array(self.num2seq, dtype=t.dtype) shape = (len(self.num2seq),) a = self.array(shape, intent.in_.c.cache, obj) assert_(a.has_shared_memory(), repr(t.dtype)) a = self.array(shape, intent.in_.cache, obj) assert_(a.has_shared_memory(), repr(t.dtype)) obj = array(self.num2seq, dtype=t.dtype, order='F') a = self.array(shape, intent.in_.c.cache, obj) assert_(a.has_shared_memory(), repr(t.dtype)) a = self.array(shape, intent.in_.cache, obj) assert_(a.has_shared_memory(), repr(t.dtype)) try: a = self.array(shape, intent.in_.cache, obj[::-1]) except ValueError as msg: if not str(msg).startswith('failed to initialize' ' intent(cache) array'): raise else: raise SystemError( 'intent(cache) should have failed on multisegmented array') def test_in_cache_from_2casttype_failure(self): for t in self.type.all_types(): if t.elsize >= self.type.elsize: continue obj = array(self.num2seq, dtype=t.dtype) shape = (len(self.num2seq),) try: self.array(shape, intent.in_.cache, obj) # Should succeed except ValueError as msg: if not str(msg).startswith('failed to initialize' ' intent(cache) array'): raise else: raise SystemError( 'intent(cache) should have failed on smaller array') def test_cache_hidden(self): shape = (2,) a = self.array(shape, intent.cache.hide, None) assert_(a.arr.shape == shape) shape = (2, 3) a = self.array(shape, intent.cache.hide, None) assert_(a.arr.shape == shape) shape = (-1, 3) try: a = self.array(shape, intent.cache.hide, None) except ValueError as msg: if not str(msg).startswith('failed to create intent' '(cache|hide)|optional array'): raise else: raise SystemError( 'intent(cache) should have failed on undefined dimensions') def test_hidden(self): shape = (2,) a = self.array(shape, intent.hide, None) assert_(a.arr.shape == shape) assert_(a.arr_equal(a.arr, zeros(shape, dtype=self.type.dtype))) shape = (2, 3) a = self.array(shape, intent.hide, None) assert_(a.arr.shape == shape) assert_(a.arr_equal(a.arr, zeros(shape, dtype=self.type.dtype))) assert_(a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS']) shape = (2, 3) a = self.array(shape, intent.c.hide, None) assert_(a.arr.shape == shape) 
assert_(a.arr_equal(a.arr, zeros(shape, dtype=self.type.dtype))) assert_(not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS']) shape = (-1, 3) try: a = self.array(shape, intent.hide, None) except ValueError as msg: if not str(msg).startswith('failed to create intent' '(cache|hide)|optional array'): raise else: raise SystemError('intent(hide) should have failed' ' on undefined dimensions') def test_optional_none(self): shape = (2,) a = self.array(shape, intent.optional, None) assert_(a.arr.shape == shape) assert_(a.arr_equal(a.arr, zeros(shape, dtype=self.type.dtype))) shape = (2, 3) a = self.array(shape, intent.optional, None) assert_(a.arr.shape == shape) assert_(a.arr_equal(a.arr, zeros(shape, dtype=self.type.dtype))) assert_(a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS']) shape = (2, 3) a = self.array(shape, intent.c.optional, None) assert_(a.arr.shape == shape) assert_(a.arr_equal(a.arr, zeros(shape, dtype=self.type.dtype))) assert_(not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS']) def test_optional_from_2seq(self): obj = self.num2seq shape = (len(obj),) a = self.array(shape, intent.optional, obj) assert_(a.arr.shape == shape) assert_(not a.has_shared_memory()) def test_optional_from_23seq(self): obj = self.num23seq shape = (len(obj), len(obj[0])) a = self.array(shape, intent.optional, obj) assert_(a.arr.shape == shape) assert_(not a.has_shared_memory()) a = self.array(shape, intent.optional.c, obj) assert_(a.arr.shape == shape) assert_(not a.has_shared_memory()) def test_inplace(self): obj = array(self.num23seq, dtype=self.type.dtype) assert_(not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS']) shape = obj.shape a = self.array(shape, intent.inplace, obj) assert_(obj[1][2] == a.arr[1][2], repr((obj, a.arr))) a.arr[1][2] = 54 assert_(obj[1][2] == a.arr[1][2] == array(54, dtype=self.type.dtype), repr((obj, a.arr))) assert_(a.arr is obj) assert_(obj.flags['FORTRAN']) # obj attributes are changed inplace! assert_(not obj.flags['CONTIGUOUS']) def test_inplace_from_casttype(self): for t in self.type.cast_types(): if t is self.type: continue obj = array(self.num23seq, dtype=t.dtype) assert_(obj.dtype.type == t.dtype) assert_(obj.dtype.type is not self.type.dtype) assert_(not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS']) shape = obj.shape a = self.array(shape, intent.inplace, obj) assert_(obj[1][2] == a.arr[1][2], repr((obj, a.arr))) a.arr[1][2] = 54 assert_(obj[1][2] == a.arr[1][2] == array(54, dtype=self.type.dtype), repr((obj, a.arr))) assert_(a.arr is obj) assert_(obj.flags['FORTRAN']) # obj attributes changed inplace! assert_(not obj.flags['CONTIGUOUS']) assert_(obj.dtype.type is self.type.dtype) # obj changed inplace! for t in _type_names: exec('''\ class test_%s_gen(unittest.TestCase, _test_shared_memory ): def setUp(self): self.type = Type(%r) array = lambda self,dims,intent,obj: Array(Type(%r),dims,intent,obj) ''' % (t, t, t)) if __name__ == "__main__": setup() run_module_suite()<|fim▁end|>
obj = array(self.num23seq, dtype=self.type.dtype, order='C') shape = (len(self.num23seq), len(self.num23seq[0])) try:
<|file_name|>SWMMOpenMINoGlobalsPythonTest.py<|end_file_name|><|fim▁begin|>from ctypes import* import math <|fim▁hole|>print(lib) print("\n") finp = b"Z:\\Documents\\Projects\\SWMMOpenMIComponent\\Source\\SWMMOpenMINoGlobalsPythonTest\\test.inp" frpt = b"Z:\\Documents\\Projects\\SWMMOpenMIComponent\\Source\\SWMMOpenMINoGlobalsPythonTest\\test.rpt" fout = b"Z:\\Documents\\Projects\\SWMMOpenMIComponent\\Source\\SWMMOpenMINoGlobalsPythonTest\\test.out" project = lib.swmm_open(finp , frpt , fout) print(project) print("\n") newHour = 0 oldHour = 0 theDay = 0 theHour = 0 elapsedTime = c_double() if(lib.swmm_getErrorCode(project) == 0): lib.swmm_start(project, 1) if(lib.swmm_getErrorCode(project) == 0): print("Simulating day: 0 Hour: 0") print("\n") while True: lib.swmm_step(project, byref(elapsedTime)) newHour = elapsedTime.value * 24 if(newHour > oldHour): theDay = int(elapsedTime.value) temp = math.floor(elapsedTime.value) temp = (elapsedTime.value - temp) * 24.0 theHour = int(temp) #print("\b\b\b\b\b\b\b\b\b\b\b\b\b\b") #print("\n") print "Hour " , str(theHour) , " Day " , str(theDay) , ' \r', #print("\n") oldHour = newHour if(elapsedTime.value <= 0 or not lib.swmm_getErrorCode(project) == 0): break lib.swmm_end(project) lib.swmm_report(project) lib.swmm_close(project)<|fim▁end|>
lib = cdll.LoadLibrary("Z:\\Documents\Projects\\SWMMOpenMIComponent\\Source\\SWMMOpenMIComponent\\bin\\Debug\\SWMMComponent.dll")
<|file_name|>test_reindex_library.py<|end_file_name|><|fim▁begin|>""" Tests for library reindex command """ import ddt from django.core.management import call_command, CommandError import mock from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore.django import modulestore from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from common.test.utils import nostderr from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory from opaque_keys import InvalidKeyError from contentstore.management.commands.reindex_library import Command as ReindexCommand from contentstore.courseware_index import SearchIndexingError @ddt.ddt class TestReindexLibrary(ModuleStoreTestCase): """ Tests for library reindex command """ def setUp(self): """ Setup method - create libraries and courses """ super(TestReindexLibrary, self).setUp() self.store = modulestore() self.first_lib = LibraryFactory.create( org="test", library="lib1", display_name="run1", default_store=ModuleStoreEnum.Type.split ) self.second_lib = LibraryFactory.create( org="test", library="lib2", display_name="run2", default_store=ModuleStoreEnum.Type.split ) self.first_course = CourseFactory.create( org="test", course="course1", display_name="run1", default_store=ModuleStoreEnum.Type.split ) self.second_course = CourseFactory.create( org="test", course="course2", display_name="run1", default_store=ModuleStoreEnum.Type.split ) REINDEX_PATH_LOCATION = 'contentstore.management.commands.reindex_library.LibrarySearchIndexer.do_library_reindex' MODULESTORE_PATCH_LOCATION = 'contentstore.management.commands.reindex_library.modulestore' YESNO_PATCH_LOCATION = 'contentstore.management.commands.reindex_library.query_yes_no' def _get_lib_key(self, library): """ Get's library key as it is passed to indexer """ return library.location.library_key def _build_calls(self, *libraries): """ BUilds a list of mock.call instances representing calls to reindexing method """ return [mock.call(self.store, self._get_lib_key(lib)) for lib in libraries] def test_given_no_arguments_raises_command_error(self): """ Test that raises CommandError for incorrect arguments """ with self.assertRaises(SystemExit), nostderr(): with self.assertRaisesRegexp(CommandError, ".* requires one or more arguments .*"): call_command('reindex_library') @ddt.data('qwerty', 'invalid_key', 'xblock-v1:qwe+rty') def test_given_invalid_lib_key_raises_not_found(self, invalid_key): """ Test that raises InvalidKeyError for invalid keys """ with self.assertRaises(InvalidKeyError): call_command('reindex_library', invalid_key) def test_given_course_key_raises_command_error(self): """ Test that raises CommandError if course key is passed """ with self.assertRaises(SystemExit), nostderr(): with self.assertRaisesRegexp(CommandError, ".* is not a library key"): call_command('reindex_library', unicode(self.first_course.id)) with self.assertRaises(SystemExit), nostderr(): with self.assertRaisesRegexp(CommandError, ".* is not a library key"): call_command('reindex_library', unicode(self.second_course.id)) with self.assertRaises(SystemExit), nostderr(): with self.assertRaisesRegexp(CommandError, ".* is not a library key"): call_command( 'reindex_library', unicode(self.second_course.id), unicode(self._get_lib_key(self.first_lib)) ) def test_given_id_list_indexes_libraries(self): """ Test that reindexes libraries when given single library key or a list of library keys """ with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \ 
mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)): call_command('reindex_library', unicode(self._get_lib_key(self.first_lib))) self.assertEqual(patched_index.mock_calls, self._build_calls(self.first_lib)) patched_index.reset_mock() call_command('reindex_library', unicode(self._get_lib_key(self.second_lib))) self.assertEqual(patched_index.mock_calls, self._build_calls(self.second_lib)) patched_index.reset_mock() call_command( 'reindex_library', unicode(self._get_lib_key(self.first_lib)), unicode(self._get_lib_key(self.second_lib)) ) expected_calls = self._build_calls(self.first_lib, self.second_lib) self.assertEqual(patched_index.mock_calls, expected_calls) def test_given_all_key_prompts_and_reindexes_all_libraries(self): """ Test that reindexes all libraries when --all key is given and confirmed """ with mock.patch(self.YESNO_PATCH_LOCATION) as patched_yes_no: patched_yes_no.return_value = True with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \ mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)): call_command('reindex_library', all=True) patched_yes_no.assert_called_once_with(ReindexCommand.CONFIRMATION_PROMPT, default='no') expected_calls = self._build_calls(self.first_lib, self.second_lib) self.assertItemsEqual(patched_index.mock_calls, expected_calls) def test_given_all_key_prompts_and_reindexes_all_libraries_cancelled(self): """ Test that does not reindex anything when --all key is given and cancelled """ with mock.patch(self.YESNO_PATCH_LOCATION) as patched_yes_no: patched_yes_no.return_value = False with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \ mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)): call_command('reindex_library', all=True) patched_yes_no.assert_called_once_with(ReindexCommand.CONFIRMATION_PROMPT, default='no') patched_index.assert_not_called() def test_fail_fast_if_reindex_fails(self): """ Test that fails on first reindexing exception """ with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index: patched_index.side_effect = SearchIndexingError("message", []) with self.assertRaises(SearchIndexingError):<|fim▁hole|><|fim▁end|>
call_command('reindex_library', unicode(self._get_lib_key(self.second_lib)))
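The reindex tests above lean on one pattern worth isolating: `mock.patch` replaces a dotted attribute path for the duration of a `with` block, and the recorded `mock_calls` list is compared against explicit `mock.call(...)` entries. A minimal, self-contained sketch of that pattern; the module and function names here are invented for illustration.

```python
from unittest import mock

def reindex(store, key):
    # Stand-in for the real indexer; tests patch this out.
    raise RuntimeError("should never run during the test")

def run_command(store, keys):
    # Stand-in for the management command body.
    for key in keys:
        reindex(store, key)

def test_indexer_called_once_per_key():
    with mock.patch(f"{__name__}.reindex") as patched:
        run_command("store", ["lib1", "lib2"])
    assert patched.mock_calls == [mock.call("store", "lib1"),
                                  mock.call("store", "lib2")]
```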
<|file_name|>url_package.py<|end_file_name|><|fim▁begin|>import posixpath class UrlPackage: """ Represents a package specified as a URL """ def __init__(self, url): """ Initialize with the url """ if ':' in url: self.url = url else: self.url = posixpath.join('git+git://github.com', url) @property def installAs(self): """ Return the string to use to install the package via pip """ return self.url <|fim▁hole|> """ Return the text to use for adding to the Requirements file """ return self.url<|fim▁end|>
def forRequirements(self, versions):
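A short usage sketch of the UrlPackage behaviour defined above, assuming the module is importable as `url_package`: a bare `owner/repo` string is joined onto `git+git://github.com`, while any string already containing a `:` (i.e. carrying a scheme) is kept verbatim.

```python
from url_package import UrlPackage  # assumes the file above is on the path

short = UrlPackage("someuser/somerepo")
full = UrlPackage("git+https://github.com/someuser/somerepo.git")

# No ':' in the input, so the GitHub prefix is prepended.
assert short.installAs == "git+git://github.com/someuser/somerepo"
# Already has a scheme, so it passes through unchanged.
assert full.installAs == "git+https://github.com/someuser/somerepo.git"
```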
<|file_name|>wiki.go<|end_file_name|><|fim▁begin|>package robots import ( "fmt" "log" "net/http" "net/url" "strings" ) type WikiBot struct { } func init() { RegisterRobot("/wiki", func() (robot Robot) { return new(WikiBot) }) } <|fim▁hole|> func (w WikiBot) DeferredAction(command *SlashCommand) { text := strings.TrimSpace(command.Text) if text != "" { response := new(IncomingWebhook) response.Channel = command.Channel_ID response.Username = "Wiki Bot" response.Text = fmt.Sprintf("@%s: Searching google for wikis relating to: %s", command.User_Name, text) response.Icon_Emoji = ":ghost:" response.Unfurl_Links = true response.Parse = "full" MakeIncomingWebhookCall(response) resp, err := http.Get(fmt.Sprintf("http://www.google.com/search?q=(site:en.wikipedia.org+OR+site:ja.wikipedia.org)+%s&btnI", url.QueryEscape(text))) if err != nil { response.Text = fmt.Sprintf("@%s: %s", command.User_Name, "Error getting wikipedia link from google :(") } else { defer resp.Body.Close() if resp.StatusCode != 200 { message := fmt.Sprintf("ERROR: Non-200 Response from Google: %s", resp.Status) log.Println(message) response.Text = fmt.Sprintf("@%s: %s", command.User_Name, message) } else { response.Text = fmt.Sprintf("@%s: %s", command.User_Name, resp.Request.URL.String()) } } MakeIncomingWebhookCall(response) } } func (w WikiBot) Description() (description string) { return "Wiki bot!\n\tUsage: /wiki\n\tExpected Response: @user: Link to wikipedia article!" }<|fim▁end|>
func (w WikiBot) Run(command *SlashCommand) (slashCommandImmediateReturn string) { go w.DeferredAction(command) return "" }
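The Run/DeferredAction split above is the usual slash-command pattern: acknowledge the request immediately and do the slow HTTP work in a background goroutine. A rough Python sketch of the same shape, with a thread standing in for the goroutine; the names are illustrative only.

```python
import threading

def deferred_action(text):
    # Slow work (search request, webhook posts) happens off the request path.
    print(f"looking up wiki link for: {text}")

def run(text):
    # Kick off the background work and return an empty reply right away,
    # mirroring Run() above.
    threading.Thread(target=deferred_action, args=(text,), daemon=True).start()
    return ""

print(run("pareto distribution"))
```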
<|file_name|>semigroup.rs<|end_file_name|><|fim▁begin|>//! Module for holding the Semigroup typeclass definition and typeclass instances //! //! You can, for example, combine tuples. #![cfg_attr( feature = "std", doc = r#" # Examples ``` #[macro_use] extern crate frunk; # fn main() { use frunk::Semigroup; let t1 = (1, 2.5f32, String::from("hi"), Some(3)); let t2 = (1, 2.5f32, String::from(" world"), None); let expected = (2, 5.0f32, String::from("hi world"), Some(3)); assert_eq!(t1.combine(&t2), expected); // ultimately, the Tuple-based Semigroup implementations are only available for a maximum of // 26 elements. If you need more, use HList, which is has no such limit. let h1 = hlist![1, 3.3, 53i64]; let h2 = hlist![2, 1.2, 1i64]; let h3 = hlist![3, 4.5, 54]; assert_eq!(h1.combine(&h2), h3) # } ```"# )] use frunk_core::hlist::*; use std::cell::*; use std::cmp::Ordering; #[cfg(feature = "std")] use std::collections::hash_map::Entry; #[cfg(feature = "std")] use std::collections::{HashMap, HashSet}; #[cfg(feature = "std")] use std::hash::Hash; use std::ops::{BitAnd, BitOr, Deref}; /// Wrapper type for types that are ordered and can have a Max combination #[derive(PartialEq, Debug, Eq, Clone, Copy, PartialOrd, Ord, Hash)] pub struct Max<T: Ord>(pub T); /// Wrapper type for types that are ordered and can have a Min combination #[derive(PartialEq, Debug, Eq, Clone, Copy, PartialOrd, Ord, Hash)] pub struct Min<T: Ord>(pub T); /// Wrapper type for types that can have a Product combination #[derive(PartialEq, Debug, Eq, Clone, Copy, PartialOrd, Ord, Hash)] pub struct Product<T>(pub T); /// Wrapper type for boolean that acts as a bitwise && combination #[derive(PartialEq, Debug, Eq, Clone, Copy, PartialOrd, Ord, Hash)] pub struct All<T>(pub T); /// Wrapper type for boolean that acts as a bitwise || combination #[derive(PartialEq, Debug, Eq, Clone, Copy, PartialOrd, Ord, Hash)] pub struct Any<T>(pub T); /// A Semigroup is a class of thing that has a definable combine operation pub trait Semigroup { /// Associative operation taking which combines two values. /// /// # Examples /// /// ``` /// use frunk::Semigroup; /// /// assert_eq!(Some(1).combine(&Some(2)), Some(3)) /// ``` fn combine(&self, other: &Self) -> Self; } /// Allow the combination of any two HLists having the same structure /// if all of the sub-element types are also Semiups impl<H: Semigroup, T: HList + Semigroup> Semigroup for HCons<H, T> { fn combine(&self, other: &Self) -> Self { self.tail .combine(&other.tail) .prepend(self.head.combine(&other.head)) } } /// Since () + () = (), the same is true for HNil impl Semigroup for HNil { fn combine(&self, _: &Self) -> Self { *self } } /// Return this combined with itself `n` times. pub fn combine_n<T>(o: &T, times: u32) -> T where T: Semigroup + Clone, { let mut x = o.clone(); // note: range is non-inclusive in the upper bound for _ in 1..times { x = o.combine(&x); } x } /// Given a sequence of `xs`, combine them and return the total /// /// If the sequence is empty, returns None. Otherwise, returns Some(total). /// /// # Examples /// /// ``` /// use frunk::semigroup::combine_all_option; /// /// let v1 = &vec![1, 2, 3]; /// assert_eq!(combine_all_option(v1), Some(6)); /// /// let v2: Vec<i16> = Vec::new(); // empty! /// assert_eq!(combine_all_option(&v2), None); /// ``` pub fn combine_all_option<T>(xs: &[T]) -> Option<T> where T: Semigroup + Clone, { xs.first() .map(|head| xs[1..].iter().fold(head.clone(), |a, b| a.combine(b))) } macro_rules! 
numeric_semigroup_imps { ($($tr:ty),*) => { $( impl Semigroup for $tr { fn combine(&self, other: &Self) -> Self { self + other } } )*<|fim▁hole|>numeric_semigroup_imps!(i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64); macro_rules! numeric_product_semigroup_imps { ($($tr:ty),*) => { $( impl Semigroup for Product<$tr> { fn combine(&self, other: &Self) -> Self { let Product(x) = *self; let Product(y) = *other; Product(x * y) } } )* } } numeric_product_semigroup_imps!(i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64); impl<T> Semigroup for Option<T> where T: Semigroup + Clone, { fn combine(&self, other: &Self) -> Self { match (self, other) { (Some(ref v), Some(ref v_other)) => Some(v.combine(v_other)), (Some(_), _) => self.clone(), _ => other.clone(), } } } #[cfg(feature = "std")] impl<T: Semigroup> Semigroup for Box<T> { fn combine(&self, other: &Self) -> Self { Box::new(self.deref().combine(other.deref())) } } #[cfg(feature = "std")] impl Semigroup for String { fn combine(&self, other: &Self) -> Self { let mut cloned = self.clone(); cloned.push_str(other); cloned } } #[cfg(feature = "std")] impl<T: Clone> Semigroup for Vec<T> { fn combine(&self, other: &Self) -> Self { let mut v = self.clone(); v.extend_from_slice(other); v } } impl<T> Semigroup for Cell<T> where T: Semigroup + Copy, { fn combine(&self, other: &Self) -> Self { Cell::new(self.get().combine(&(other.get()))) } } impl<T: Semigroup> Semigroup for RefCell<T> { fn combine(&self, other: &Self) -> Self { let self_b = self.borrow(); let other_b = other.borrow(); RefCell::new(self_b.deref().combine(other_b.deref())) } } #[cfg(feature = "std")] impl<T> Semigroup for HashSet<T> where T: Eq + Hash + Clone, { fn combine(&self, other: &Self) -> Self { self.union(other).cloned().collect() } } #[cfg(feature = "std")] impl<K, V> Semigroup for HashMap<K, V> where K: Eq + Hash + Clone, V: Semigroup + Clone, { fn combine(&self, other: &Self) -> Self { let mut h: HashMap<K, V> = self.clone(); for (k, v) in other { let k_clone = k.clone(); match h.entry(k_clone) { Entry::Occupied(o) => { let existing = o.into_mut(); let comb = existing.combine(v); *existing = comb; } Entry::Vacant(o) => { o.insert(v.clone()); } } } h } } impl<T> Semigroup for Max<T> where T: Ord + Clone, { fn combine(&self, other: &Self) -> Self { let x = self.0.clone(); let y = other.0.clone(); match x.cmp(&y) { Ordering::Less => Max(y), _ => Max(x), } } } impl<T> Semigroup for Min<T> where T: Ord + Clone, { fn combine(&self, other: &Self) -> Self { let x = self.0.clone(); let y = other.0.clone(); match x.cmp(&y) { Ordering::Less => Min(x), _ => Min(y), } } } // Deriving for all BitAnds sucks because we are then bound on ::Output, which may not be the same type macro_rules! simple_all_impls { ($($tr:ty)*) => { $( impl Semigroup for All<$tr> { fn combine(&self, other: &Self) -> Self { let x = self.0; let y = other.0; All(x.bitand(y)) } } )* } } simple_all_impls! { bool usize u8 u16 u32 u64 isize i8 i16 i32 i64 } macro_rules! simple_any_impls { ($($tr:ty)*) => { $( impl Semigroup for Any<$tr> { fn combine(&self, other: &Self) -> Self { let x = self.0; let y = other.0; Any(x.bitor(y)) } } )* } } simple_any_impls! { bool usize u8 u16 u32 u64 isize i8 i16 i32 i64 } macro_rules! 
tuple_impls { () => {}; // no more (($idx:tt => $typ:ident), $( ($nidx:tt => $ntyp:ident), )*) => { // Invoke recursive reversal of list that ends in the macro expansion implementation // of the reversed list // tuple_impls!([($idx, $typ);] $( ($nidx => $ntyp), )*); tuple_impls!($( ($nidx => $ntyp), )*); // invoke macro on tail }; // ([accumulatedList], listToReverse); recursively calls tuple_impls until the list to reverse // + is empty (see next pattern) // ([$(($accIdx: tt, $accTyp: ident);)+] ($idx:tt => $typ:ident), $( ($nidx:tt => $ntyp:ident), )*) => { tuple_impls!([($idx, $typ); $(($accIdx, $accTyp); )*] $( ($nidx => $ntyp), ) *); }; // Finally expand into our implementation ([($idx:tt, $typ:ident); $( ($nidx:tt, $ntyp:ident); )*]) => { impl<$typ: Semigroup, $( $ntyp: Semigroup),*> Semigroup for ($typ, $( $ntyp ),*) { fn combine(&self, other: &Self) -> Self { (self.$idx.combine(&other.$idx), $(self.$nidx.combine(&other.$nidx), )*) } } } } tuple_impls! { (20 => U), (19 => T), (18 => S), (17 => R), (16 => Q), (15 => P), (14 => O), (13 => N), (12 => M), (11 => L), (10 => K), (9 => J), (8 => I), (7 => H), (6 => G), (5 => F), (4 => E), (3 => D), (2 => C), (1 => B), (0 => A), } #[cfg(test)] mod tests { use super::*; macro_rules! semigroup_tests { ($($name:ident, $comb: expr => $expected: expr, $tr:ty)+) => { $( #[test] fn $name() { let r: $tr = $comb; assert_eq!(r, $expected) } )* } } semigroup_tests! { test_i8, 1.combine(&2) => 3, i8 test_product_i8, Product(1).combine(&Product(2)) => Product(2), Product<i8> test_i16, 1.combine(&2) => 3, i16 test_i32, 1.combine(&2) => 3, i32 test_u8, 1.combine(&2) => 3, u8 test_u16, 1.combine(&2) => 3, u16 test_u32, 1.combine(&2) => 3, u32 test_usize, 1.combine(&2) => 3, usize test_isize, 1.combine(&2) => 3, isize test_f32, 1f32.combine(&2f32) => 3f32, f32 test_f64, 1f64.combine(&2f64) => 3f64, f64 test_option_i16, Some(1).combine(&Some(2)) => Some(3), Option<i16> test_option_i16_none1, None.combine(&Some(2)) => Some(2), Option<i16> test_option_i16_none2, Some(2).combine(&None) => Some(2), Option<i16> } #[test] #[cfg(feature = "std")] fn test_string() { let v1 = String::from("Hello"); let v2 = String::from(" world"); assert_eq!(v1.combine(&v2), "Hello world") } #[test] #[cfg(feature = "std")] fn test_vec_i32() { let v1 = vec![1, 2, 3]; let v2 = vec![4, 5, 6]; assert_eq!(v1.combine(&v2), vec![1, 2, 3, 4, 5, 6]) } #[test] fn test_refcell() { let v1 = RefCell::new(1); let v2 = RefCell::new(2); assert_eq!(v1.combine(&v2), RefCell::new(3)) } #[test] #[cfg(feature = "std")] fn test_hashset() { let mut v1 = HashSet::new(); v1.insert(1); v1.insert(2); assert!(!v1.contains(&3)); let mut v2 = HashSet::new(); v2.insert(3); v2.insert(4); assert!(!v2.contains(&1)); let mut expected = HashSet::new(); expected.insert(1); expected.insert(2); expected.insert(3); expected.insert(4); assert_eq!(v1.combine(&v2), expected) } #[test] #[cfg(feature = "std")] fn test_tuple() { let t1 = (1, 2.5f32, String::from("hi"), Some(3)); let t2 = (1, 2.5f32, String::from(" world"), None); let expected = (2, 5.0f32, String::from("hi world"), Some(3)); assert_eq!(t1.combine(&t2), expected) } #[test] fn test_max() { assert_eq!(Max(1).combine(&Max(2)), Max(2)); let v = [Max(1), Max(2), Max(3)]; assert_eq!(combine_all_option(&v), Some(Max(3))); } #[test] fn test_min() { assert_eq!(Min(1).combine(&Min(2)), Min(1)); let v = [Min(1), Min(2), Min(3)]; assert_eq!(combine_all_option(&v), Some(Min(1))); } #[test] fn test_all() { assert_eq!(All(3).combine(&All(5)), All(1)); 
assert_eq!(All(true).combine(&All(false)), All(false)); } #[test] fn test_any() { assert_eq!(Any(3).combine(&Any(5)), Any(7)); assert_eq!(Any(true).combine(&Any(false)), Any(true)); } #[test] #[cfg(feature = "std")] fn test_hashmap() { let mut v1: HashMap<i32, Option<String>> = HashMap::new(); v1.insert(1, Some("Hello".to_owned())); v1.insert(2, Some("Goodbye".to_owned())); v1.insert(4, None); let mut v2: HashMap<i32, Option<String>> = HashMap::new(); v2.insert(1, Some(" World".to_owned())); v2.insert(4, Some("Nope".to_owned())); let mut expected = HashMap::new(); expected.insert(1, Some("Hello World".to_owned())); expected.insert(2, Some("Goodbye".to_owned())); expected.insert(4, Some("Nope".to_owned())); assert_eq!(v1.combine(&v2), expected) } #[test] fn test_combine_all_option() { let v1 = [1, 2, 3]; assert_eq!(combine_all_option(&v1), Some(6)); let v2 = [Some(1), Some(2), Some(3)]; assert_eq!(combine_all_option(&v2), Some(Some(6))); } #[test] fn test_combine_n() { assert_eq!(combine_n(&1, 3), 3); assert_eq!(combine_n(&2, 1), 2); assert_eq!(combine_n(&Some(2), 4), Some(8)); } #[test] #[cfg(feature = "std")] fn test_combine_hlist() { let h1 = hlist![Some(1), 3.3, 53i64, "hello".to_owned()]; let h2 = hlist![Some(2), 1.2, 1i64, " world".to_owned()]; let h3 = hlist![Some(3), 4.5, 54, "hello world".to_owned()]; assert_eq!(h1.combine(&h2), h3) } }<|fim▁end|>
} }
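To make the combine semantics above easy to check outside Rust, here is a loose Python sketch of the same three ideas: an associative `combine`, `combine_n` as repeated combination, and `combine_all_option` returning `None` for an empty sequence. It mirrors only the numeric (additive) instances.

```python
def combine(a, b):
    # Numeric semigroup: combination is addition, as in the impls above.
    return a + b

def combine_n(x, times):
    # x combined with itself `times` times; the range is non-inclusive of
    # the upper bound, so times == 1 returns x unchanged.
    out = x
    for _ in range(1, times):
        out = combine(out, x)
    return out

def combine_all_option(xs):
    # None for an empty sequence, otherwise the running combination.
    if not xs:
        return None
    total = xs[0]
    for x in xs[1:]:
        total = combine(total, x)
    return total

assert combine_n(2, 4) == 8
assert combine_all_option([1, 2, 3]) == 6
assert combine_all_option([]) is None
```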
<|file_name|>proxier.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package userspace import ( "fmt" "net" "strconv" "strings" "sync" "sync/atomic" "syscall" "time" "github.com/golang/glog" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/proxy" "k8s.io/kubernetes/pkg/types" "k8s.io/kubernetes/pkg/util" "k8s.io/kubernetes/pkg/util/errors" "k8s.io/kubernetes/pkg/util/iptables" ) type portal struct { ip net.IP port int isExternal bool } type serviceInfo struct { isAliveAtomic int32 // Only access this with atomic ops portal portal protocol api.Protocol proxyPort int socket proxySocket timeout time.Duration activeClients *clientCache nodePort int loadBalancerStatus api.LoadBalancerStatus sessionAffinityType api.ServiceAffinity stickyMaxAgeMinutes int // Deprecated, but required for back-compat (including e2e) externalIPs []string } func (info *serviceInfo) setAlive(b bool) { var i int32 if b { i = 1 } atomic.StoreInt32(&info.isAliveAtomic, i) } func (info *serviceInfo) isAlive() bool { return atomic.LoadInt32(&info.isAliveAtomic) != 0 } func logTimeout(err error) bool { if e, ok := err.(net.Error); ok { if e.Timeout() { glog.V(3).Infof("connection to endpoint closed due to inactivity") return true } } return false } // Proxier is a simple proxy for TCP connections between a localhost:lport // and services that provide the actual implementations. type Proxier struct { loadBalancer LoadBalancer mu sync.Mutex // protects serviceMap serviceMap map[proxy.ServicePortName]*serviceInfo syncPeriod time.Duration portMapMutex sync.Mutex portMap map[portMapKey]*portMapValue numProxyLoops int32 // use atomic ops to access this; mostly for testing listenIP net.IP iptables iptables.Interface hostIP net.IP proxyPorts PortAllocator } // assert Proxier is a ProxyProvider var _ proxy.ProxyProvider = &Proxier{} // A key for the portMap. The ip has to be a tring because slices can't be map // keys. type portMapKey struct { ip string port int protocol api.Protocol } func (k *portMapKey) String() string { return fmt.Sprintf("%s:%d/%s", k.ip, k.port, k.protocol) } // A value for the portMap type portMapValue struct { owner proxy.ServicePortName socket interface { Close() error } } var ( // ErrProxyOnLocalhost is returned by NewProxier if the user requests a proxier on // the loopback address. May be checked for by callers of NewProxier to know whether // the caller provided invalid input. ErrProxyOnLocalhost = fmt.Errorf("cannot proxy on localhost") ) // IsProxyLocked returns true if the proxy could not acquire the lock on iptables. func IsProxyLocked(err error) bool { return strings.Contains(err.Error(), "holding the xtables lock") } // NewProxier returns a new Proxier given a LoadBalancer and an address on // which to listen. Because of the iptables logic, It is assumed that there // is only a single Proxier active on a machine. 
An error will be returned if // the proxier cannot be started due to an invalid ListenIP (loopback) or // if iptables fails to update or acquire the initial lock. Once a proxier is // created, it will keep iptables up to date in the background and will not // terminate if a particular iptables call fails. func NewProxier(loadBalancer LoadBalancer, listenIP net.IP, iptables iptables.Interface, pr util.PortRange, syncPeriod time.Duration) (*Proxier, error) { if listenIP.Equal(localhostIPv4) || listenIP.Equal(localhostIPv6) { return nil, ErrProxyOnLocalhost } hostIP, err := util.ChooseHostInterface() if err != nil { return nil, fmt.Errorf("failed to select a host interface: %v", err) } err = setRLimit(64 * 1000) if err != nil { return nil, fmt.Errorf("failed to set open file handler limit", err) } proxyPorts := newPortAllocator(pr) glog.V(2).Infof("Setting proxy IP to %v and initializing iptables", hostIP) return createProxier(loadBalancer, listenIP, iptables, hostIP, proxyPorts, syncPeriod) } func setRLimit(limit uint64) error { return syscall.Setrlimit(syscall.RLIMIT_NOFILE, &syscall.Rlimit{Max: limit, Cur: limit}) } func createProxier(loadBalancer LoadBalancer, listenIP net.IP, iptables iptables.Interface, hostIP net.IP, proxyPorts PortAllocator, syncPeriod time.Duration) (*Proxier, error) { // convenient to pass nil for tests.. if proxyPorts == nil { proxyPorts = newPortAllocator(util.PortRange{}) } // Set up the iptables foundations we need. if err := iptablesInit(iptables); err != nil { return nil, fmt.Errorf("failed to initialize iptables: %v", err) } // Flush old iptables rules (since the bound ports will be invalid after a restart). // When OnUpdate() is first called, the rules will be recreated. if err := iptablesFlush(iptables); err != nil { return nil, fmt.Errorf("failed to flush iptables: %v", err) } return &Proxier{ loadBalancer: loadBalancer, serviceMap: make(map[proxy.ServicePortName]*serviceInfo), portMap: make(map[portMapKey]*portMapValue), syncPeriod: syncPeriod, listenIP: listenIP, iptables: iptables, hostIP: hostIP, proxyPorts: proxyPorts, }, nil } // CleanupLeftovers removes all iptables rules and chains created by the Proxier // It returns true if an error was encountered. Errors are logged. func CleanupLeftovers(ipt iptables.Interface) (encounteredError bool) { // NOTE: Warning, this needs to be kept in sync with the userspace Proxier, // we want to ensure we remove all of the iptables rules it creates. 
// Currently they are all in iptablesInit() // Delete Rules first, then Flush and Delete Chains args := []string{"-m", "comment", "--comment", "handle ClusterIPs; NOTE: this must be before the NodePort rules"} if err := ipt.DeleteRule(iptables.TableNAT, iptables.ChainOutput, append(args, "-j", string(iptablesHostPortalChain))...); err != nil { glog.Errorf("Error removing userspace rule: %v", err) encounteredError = true } if err := ipt.DeleteRule(iptables.TableNAT, iptables.ChainPrerouting, append(args, "-j", string(iptablesContainerPortalChain))...); err != nil { glog.Errorf("Error removing userspace rule: %v", err) encounteredError = true } args = []string{"-m", "addrtype", "--dst-type", "LOCAL"} args = append(args, "-m", "comment", "--comment", "handle service NodePorts; NOTE: this must be the last rule in the chain") if err := ipt.DeleteRule(iptables.TableNAT, iptables.ChainOutput, append(args, "-j", string(iptablesHostNodePortChain))...); err != nil { glog.Errorf("Error removing userspace rule: %v", err) encounteredError = true } if err := ipt.DeleteRule(iptables.TableNAT, iptables.ChainPrerouting, append(args, "-j", string(iptablesContainerNodePortChain))...); err != nil { glog.Errorf("Error removing userspace rule: %v", err) encounteredError = true } // flush and delete chains. chains := []iptables.Chain{iptablesContainerPortalChain, iptablesHostPortalChain, iptablesHostNodePortChain, iptablesContainerNodePortChain} for _, c := range chains { // flush chain, then if sucessful delete, delete will fail if flush fails. if err := ipt.FlushChain(iptables.TableNAT, c); err != nil { glog.Errorf("Error flushing userspace chain: %v", err) encounteredError = true } else { if err = ipt.DeleteChain(iptables.TableNAT, c); err != nil { glog.Errorf("Error deleting userspace chain: %v", err) encounteredError = true } } } return encounteredError } // Sync is called to immediately synchronize the proxier state to iptables func (proxier *Proxier) Sync() { if err := iptablesInit(proxier.iptables); err != nil { glog.Errorf("Failed to ensure iptables: %v", err) } proxier.ensurePortals() proxier.cleanupStaleStickySessions() } // SyncLoop runs periodic work. This is expected to run as a goroutine or as the main loop of the app. It does not return. func (proxier *Proxier) SyncLoop() { t := time.NewTicker(proxier.syncPeriod) defer t.Stop() for { <-t.C glog.V(6).Infof("Periodic sync") proxier.Sync() } } // Ensure that portals exist for all services. func (proxier *Proxier) ensurePortals() { proxier.mu.Lock() defer proxier.mu.Unlock() // NB: This does not remove rules that should not be present. for name, info := range proxier.serviceMap { err := proxier.openPortal(name, info) if err != nil { glog.Errorf("Failed to ensure portal for %q: %v", name, err) } } } // clean up any stale sticky session records in the hash map. func (proxier *Proxier) cleanupStaleStickySessions() { proxier.mu.Lock() defer proxier.mu.Unlock() for name := range proxier.serviceMap { proxier.loadBalancer.CleanupStaleStickySessions(name) } } // This assumes proxier.mu is not locked. func (proxier *Proxier) stopProxy(service proxy.ServicePortName, info *serviceInfo) error { proxier.mu.Lock() defer proxier.mu.Unlock() return proxier.stopProxyInternal(service, info) } // This assumes proxier.mu is locked. 
func (proxier *Proxier) stopProxyInternal(service proxy.ServicePortName, info *serviceInfo) error { delete(proxier.serviceMap, service) info.setAlive(false) err := info.socket.Close() port := info.socket.ListenPort() proxier.proxyPorts.Release(port) return err } func (proxier *Proxier) getServiceInfo(service proxy.ServicePortName) (*serviceInfo, bool) { proxier.mu.Lock() defer proxier.mu.Unlock() info, ok := proxier.serviceMap[service] return info, ok } func (proxier *Proxier) setServiceInfo(service proxy.ServicePortName, info *serviceInfo) { proxier.mu.Lock() defer proxier.mu.Unlock() proxier.serviceMap[service] = info } // addServiceOnPort starts listening for a new service, returning the serviceInfo. // Pass proxyPort=0 to allocate a random port. The timeout only applies to UDP // connections, for now. func (proxier *Proxier) addServiceOnPort(service proxy.ServicePortName, protocol api.Protocol, proxyPort int, timeout time.Duration) (*serviceInfo, error) { sock, err := newProxySocket(protocol, proxier.listenIP, proxyPort) if err != nil { return nil, err } _, portStr, err := net.SplitHostPort(sock.Addr().String()) if err != nil { sock.Close() return nil, err } portNum, err := strconv.Atoi(portStr) if err != nil { sock.Close() return nil, err } si := &serviceInfo{ isAliveAtomic: 1, proxyPort: portNum, protocol: protocol, socket: sock, timeout: timeout, activeClients: newClientCache(), sessionAffinityType: api.ServiceAffinityNone, // default stickyMaxAgeMinutes: 180, // TODO: parameterize this in the API. } proxier.setServiceInfo(service, si) glog.V(2).Infof("Proxying for service %q on %s port %d", service, protocol, portNum) go func(service proxy.ServicePortName, proxier *Proxier) { defer util.HandleCrash() atomic.AddInt32(&proxier.numProxyLoops, 1) sock.ProxyLoop(service, si, proxier) atomic.AddInt32(&proxier.numProxyLoops, -1) }(service, proxier) return si, nil } // How long we leave idle UDP connections open. const udpIdleTimeout = 1 * time.Second // OnUpdate manages the active set of service proxies. // Active service proxies are reinitialized if found in the update set or // shutdown if missing from the update set. func (proxier *Proxier) OnServiceUpdate(services []api.Service) { glog.V(4).Infof("Received update notice: %+v", services) activeServices := make(map[proxy.ServicePortName]bool) // use a map as a set for i := range services { service := &services[i] // if ClusterIP is "None" or empty, skip proxying if !api.IsServiceIPSet(service) { glog.V(3).Infof("Skipping service %s due to clusterIP = %q", types.NamespacedName{Namespace: service.Namespace, Name: service.Name}, service.Spec.ClusterIP) continue } for i := range service.Spec.Ports { servicePort := &service.Spec.Ports[i] serviceName := proxy.ServicePortName{NamespacedName: types.NamespacedName{Namespace: service.Namespace, Name: service.Name}, Port: servicePort.Name} activeServices[serviceName] = true serviceIP := net.ParseIP(service.Spec.ClusterIP) info, exists := proxier.getServiceInfo(serviceName) // TODO: check health of the socket? What if ProxyLoop exited? if exists && sameConfig(info, service, servicePort) { // Nothing changed. 
continue } if exists { glog.V(4).Infof("Something changed for service %q: stopping it", serviceName) err := proxier.closePortal(serviceName, info) if err != nil { glog.Errorf("Failed to close portal for %q: %v", serviceName, err) } err = proxier.stopProxy(serviceName, info) if err != nil { glog.Errorf("Failed to stop service %q: %v", serviceName, err) } } proxyPort, err := proxier.proxyPorts.AllocateNext() if err != nil { glog.Errorf("failed to allocate proxy port for service %q: %v", serviceName, err) continue } glog.V(1).Infof("Adding new service %q at %s:%d/%s", serviceName, serviceIP, servicePort.Port, servicePort.Protocol) info, err = proxier.addServiceOnPort(serviceName, servicePort.Protocol, proxyPort, udpIdleTimeout) if err != nil { glog.Errorf("Failed to start proxy for %q: %v", serviceName, err) continue } info.portal.ip = serviceIP info.portal.port = servicePort.Port info.externalIPs = service.Spec.ExternalIPs // Deep-copy in case the service instance changes info.loadBalancerStatus = *api.LoadBalancerStatusDeepCopy(&service.Status.LoadBalancer) info.nodePort = servicePort.NodePort info.sessionAffinityType = service.Spec.SessionAffinity glog.V(4).Infof("info: %+v", info) err = proxier.openPortal(serviceName, info) if err != nil { glog.Errorf("Failed to open portal for %q: %v", serviceName, err) } proxier.loadBalancer.NewService(serviceName, info.sessionAffinityType, info.stickyMaxAgeMinutes) } } proxier.mu.Lock() defer proxier.mu.Unlock() for name, info := range proxier.serviceMap { if !activeServices[name] { glog.V(1).Infof("Stopping service %q", name) err := proxier.closePortal(name, info) if err != nil { glog.Errorf("Failed to close portal for %q: %v", name, err) } err = proxier.stopProxyInternal(name, info) if err != nil { glog.Errorf("Failed to stop service %q: %v", name, err) } } } } func sameConfig(info *serviceInfo, service *api.Service, port *api.ServicePort) bool { if info.protocol != port.Protocol || info.portal.port != port.Port || info.nodePort != port.NodePort { return false } if !info.portal.ip.Equal(net.ParseIP(service.Spec.ClusterIP)) { return false } if !ipsEqual(info.externalIPs, service.Spec.ExternalIPs) { return false } if !api.LoadBalancerStatusEqual(&info.loadBalancerStatus, &service.Status.LoadBalancer) { return false } if info.sessionAffinityType != service.Spec.SessionAffinity { return false } return true } func ipsEqual(lhs, rhs []string) bool { if len(lhs) != len(rhs) { return false } for i := range lhs { if lhs[i] != rhs[i] { return false } } return true } func (proxier *Proxier) openPortal(service proxy.ServicePortName, info *serviceInfo) error { err := proxier.openOnePortal(info.portal, info.protocol, proxier.listenIP, info.proxyPort, service) if err != nil { return err } for _, publicIP := range info.externalIPs { err = proxier.openOnePortal(portal{net.ParseIP(publicIP), info.portal.port, true}, info.protocol, proxier.listenIP, info.proxyPort, service) if err != nil { return err } } for _, ingress := range info.loadBalancerStatus.Ingress { if ingress.IP != "" { err = proxier.openOnePortal(portal{net.ParseIP(ingress.IP), info.portal.port, false}, info.protocol, proxier.listenIP, info.proxyPort, service) if err != nil { return err } } } if info.nodePort != 0 { err = proxier.openNodePort(info.nodePort, info.protocol, proxier.listenIP, info.proxyPort, service) if err != nil { return err } } return nil } func (proxier *Proxier) openOnePortal(portal portal, protocol api.Protocol, proxyIP net.IP, proxyPort int, name proxy.ServicePortName) error { if 
local, err := isLocalIP(portal.ip); err != nil { return fmt.Errorf("can't determine if IP is local, assuming not: %v", err) } else if local { err := proxier.claimNodePort(portal.ip, portal.port, protocol, name) if err != nil { return err } } // Handle traffic from containers. args := proxier.iptablesContainerPortalArgs(portal.ip, portal.isExternal, false, portal.port, protocol, proxyIP, proxyPort, name) existed, err := proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesContainerPortalChain, args...) if err != nil { glog.Errorf("Failed to install iptables %s rule for service %q, args:%v", iptablesContainerPortalChain, name, args) return err } if !existed { glog.V(3).Infof("Opened iptables from-containers portal for service %q on %s %s:%d", name, protocol, portal.ip, portal.port) } if portal.isExternal { args := proxier.iptablesContainerPortalArgs(portal.ip, false, true, portal.port, protocol, proxyIP, proxyPort, name) existed, err := proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesContainerPortalChain, args...) if err != nil { glog.Errorf("Failed to install iptables %s rule that opens service %q for local traffic, args:%v", iptablesContainerPortalChain, name, args) return err } if !existed { glog.V(3).Infof("Opened iptables from-containers portal for service %q on %s %s:%d for local traffic", name, protocol, portal.ip, portal.port) } args = proxier.iptablesHostPortalArgs(portal.ip, true, portal.port, protocol, proxyIP, proxyPort, name) existed, err = proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesHostPortalChain, args...) if err != nil { glog.Errorf("Failed to install iptables %s rule for service %q for dst-local traffic", iptablesHostPortalChain, name) return err } if !existed { glog.V(3).Infof("Opened iptables from-host portal for service %q on %s %s:%d for dst-local traffic", name, protocol, portal.ip, portal.port) } return nil } // Handle traffic from the host. args = proxier.iptablesHostPortalArgs(portal.ip, false, portal.port, protocol, proxyIP, proxyPort, name) existed, err = proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesHostPortalChain, args...) if err != nil { glog.Errorf("Failed to install iptables %s rule for service %q", iptablesHostPortalChain, name) return err } if !existed { glog.V(3).Infof("Opened iptables from-host portal for service %q on %s %s:%d", name, protocol, portal.ip, portal.port) } return nil } // Marks a port as being owned by a particular service, or returns error if already claimed. // Idempotent: reclaiming with the same owner is not an error func (proxier *Proxier) claimNodePort(ip net.IP, port int, protocol api.Protocol, owner proxy.ServicePortName) error { proxier.portMapMutex.Lock() defer proxier.portMapMutex.Unlock() // TODO: We could pre-populate some reserved ports into portMap and/or blacklist some well-known ports key := portMapKey{ip: ip.String(), port: port, protocol: protocol} existing, found := proxier.portMap[key] if !found { // Hold the actual port open, even though we use iptables to redirect // it. This ensures that a) it's safe to take and b) that stays true. // NOTE: We should not need to have a real listen()ing socket - bind() // should be enough, but I can't figure out a way to e2e test without // it. Tools like 'ss' and 'netstat' do not show sockets that are // bind()ed but not listen()ed, and at least the default debian netcat // has no way to avoid about 10 seconds of retries. 
socket, err := newProxySocket(protocol, ip, port) if err != nil { return fmt.Errorf("can't open node port for %s: %v", key.String(), err) } proxier.portMap[key] = &portMapValue{owner: owner, socket: socket} glog.V(2).Infof("Claimed local port %s", key.String()) return nil } if existing.owner == owner { // We are idempotent return nil } return fmt.Errorf("Port conflict detected on port %s. %v vs %v", key.String(), owner, existing) } // Release a claim on a port. Returns an error if the owner does not match the claim. // Tolerates release on an unclaimed port, to simplify . func (proxier *Proxier) releaseNodePort(ip net.IP, port int, protocol api.Protocol, owner proxy.ServicePortName) error { proxier.portMapMutex.Lock() defer proxier.portMapMutex.Unlock() key := portMapKey{ip: ip.String(), port: port, protocol: protocol} existing, found := proxier.portMap[key] if !found { // We tolerate this, it happens if we are cleaning up a failed allocation glog.Infof("Ignoring release on unowned port: %v", key) return nil } if existing.owner != owner { return fmt.Errorf("Port conflict detected on port %v (unowned unlock). %v vs %v", key, owner, existing) } delete(proxier.portMap, key) existing.socket.Close() return nil } func (proxier *Proxier) openNodePort(nodePort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, name proxy.ServicePortName) error { // TODO: Do we want to allow containers to access public services? Probably yes. // TODO: We could refactor this to be the same code as portal, but with IP == nil err := proxier.claimNodePort(nil, nodePort, protocol, name) if err != nil { return err } // Handle traffic from containers. args := proxier.iptablesContainerNodePortArgs(nodePort, protocol, proxyIP, proxyPort, name) existed, err := proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesContainerNodePortChain, args...) if err != nil { glog.Errorf("Failed to install iptables %s rule for service %q", iptablesContainerNodePortChain, name) return err } if !existed { glog.Infof("Opened iptables from-containers public port for service %q on %s port %d", name, protocol, nodePort) } // Handle traffic from the host. args = proxier.iptablesHostNodePortArgs(nodePort, protocol, proxyIP, proxyPort, name) existed, err = proxier.iptables.EnsureRule(iptables.Append, iptables.TableNAT, iptablesHostNodePortChain, args...) if err != nil { glog.Errorf("Failed to install iptables %s rule for service %q", iptablesHostNodePortChain, name) return err } if !existed { glog.Infof("Opened iptables from-host public port for service %q on %s port %d", name, protocol, nodePort) } return nil } func (proxier *Proxier) closePortal(service proxy.ServicePortName, info *serviceInfo) error { // Collect errors and report them all at the end. el := proxier.closeOnePortal(info.portal, info.protocol, proxier.listenIP, info.proxyPort, service) for _, publicIP := range info.externalIPs { el = append(el, proxier.closeOnePortal(portal{net.ParseIP(publicIP), info.portal.port, true}, info.protocol, proxier.listenIP, info.proxyPort, service)...) } for _, ingress := range info.loadBalancerStatus.Ingress { if ingress.IP != "" { el = append(el, proxier.closeOnePortal(portal{net.ParseIP(ingress.IP), info.portal.port, false}, info.protocol, proxier.listenIP, info.proxyPort, service)...) } } if info.nodePort != 0 { el = append(el, proxier.closeNodePort(info.nodePort, info.protocol, proxier.listenIP, info.proxyPort, service)...) 
} if len(el) == 0 { glog.V(3).Infof("Closed iptables portals for service %q", service) } else { glog.Errorf("Some errors closing iptables portals for service %q", service) } return errors.NewAggregate(el) } func (proxier *Proxier) closeOnePortal(portal portal, protocol api.Protocol, proxyIP net.IP, proxyPort int, name proxy.ServicePortName) []error { el := []error{} if local, err := isLocalIP(portal.ip); err != nil { el = append(el, fmt.Errorf("can't determine if IP is local, assuming not: %v", err)) } else if local { if err := proxier.releaseNodePort(nil, portal.port, protocol, name); err != nil { el = append(el, err) } } // Handle traffic from containers. args := proxier.iptablesContainerPortalArgs(portal.ip, portal.isExternal, false, portal.port, protocol, proxyIP, proxyPort, name) if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesContainerPortalChain, args...); err != nil { glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesContainerPortalChain, name) el = append(el, err) } if portal.isExternal { args := proxier.iptablesContainerPortalArgs(portal.ip, false, true, portal.port, protocol, proxyIP, proxyPort, name) if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesContainerPortalChain, args...); err != nil { glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesContainerPortalChain, name) el = append(el, err) } args = proxier.iptablesHostPortalArgs(portal.ip, true, portal.port, protocol, proxyIP, proxyPort, name) if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesHostPortalChain, args...); err != nil { glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesHostPortalChain, name) el = append(el, err) } return el } // Handle traffic from the host (portalIP is not external). args = proxier.iptablesHostPortalArgs(portal.ip, false, portal.port, protocol, proxyIP, proxyPort, name) if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesHostPortalChain, args...); err != nil { glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesHostPortalChain, name) el = append(el, err) } return el } func (proxier *Proxier) closeNodePort(nodePort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, name proxy.ServicePortName) []error { el := []error{} // Handle traffic from containers. args := proxier.iptablesContainerNodePortArgs(nodePort, protocol, proxyIP, proxyPort, name) if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesContainerNodePortChain, args...); err != nil { glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesContainerNodePortChain, name) el = append(el, err) } // Handle traffic from the host. args = proxier.iptablesHostNodePortArgs(nodePort, protocol, proxyIP, proxyPort, name) if err := proxier.iptables.DeleteRule(iptables.TableNAT, iptablesHostNodePortChain, args...); err != nil { glog.Errorf("Failed to delete iptables %s rule for service %q", iptablesHostNodePortChain, name) el = append(el, err) } if err := proxier.releaseNodePort(nil, nodePort, protocol, name); err != nil { el = append(el, err) } return el } func isLocalIP(ip net.IP) (bool, error) { addrs, err := net.InterfaceAddrs() if err != nil { return false, err } for i := range addrs { intf, _, err := net.ParseCIDR(addrs[i].String()) if err != nil { return false, err } if ip.Equal(intf) { return true, nil } } return false, nil } // See comments in the *PortalArgs() functions for some details about why we // use two chains for portals. 
var iptablesContainerPortalChain iptables.Chain = "KUBE-PORTALS-CONTAINER" var iptablesHostPortalChain iptables.Chain = "KUBE-PORTALS-HOST" // Chains for NodePort services var iptablesContainerNodePortChain iptables.Chain = "KUBE-NODEPORT-CONTAINER" var iptablesHostNodePortChain iptables.Chain = "KUBE-NODEPORT-HOST" // Ensure that the iptables infrastructure we use is set up. This can safely be called periodically. func iptablesInit(ipt iptables.Interface) error { // TODO: There is almost certainly room for optimization here. E.g. If // we knew the service-cluster-ip-range CIDR we could fast-track outbound packets not // destined for a service. There's probably more, help wanted. // Danger - order of these rules matters here: // // We match portal rules first, then NodePort rules. For NodePort rules, we filter primarily on --dst-type LOCAL, // because we want to listen on all local addresses, but don't match internet traffic with the same dst port number. // // There is one complication (per thockin): // -m addrtype --dst-type LOCAL is what we want except that it is broken (by intent without foresight to our usecase) // on at least GCE. Specifically, GCE machines have a daemon which learns what external IPs are forwarded to that // machine, and configure a local route for that IP, making a match for --dst-type LOCAL when we don't want it to. // Removing the route gives correct behavior until the daemon recreates it. // Killing the daemon is an option, but means that any non-kubernetes use of the machine with external IP will be broken. // // This applies to IPs on GCE that are actually from a load-balancer; they will be categorized as LOCAL. // _If_ the chains were in the wrong order, and the LB traffic had dst-port == a NodePort on some other service, // the NodePort would take priority (incorrectly). // This is unlikely (and would only affect outgoing traffic from the cluster to the load balancer, which seems // doubly-unlikely), but we need to be careful to keep the rules in the right order. 
args := []string{ /* service-cluster-ip-range matching could go here */ } args = append(args, "-m", "comment", "--comment", "handle ClusterIPs; NOTE: this must be before the NodePort rules") if _, err := ipt.EnsureChain(iptables.TableNAT, iptablesContainerPortalChain); err != nil { return err } if _, err := ipt.EnsureRule(iptables.Prepend, iptables.TableNAT, iptables.ChainPrerouting, append(args, "-j", string(iptablesContainerPortalChain))...); err != nil { return err } if _, err := ipt.EnsureChain(iptables.TableNAT, iptablesHostPortalChain); err != nil { return err } if _, err := ipt.EnsureRule(iptables.Prepend, iptables.TableNAT, iptables.ChainOutput, append(args, "-j", string(iptablesHostPortalChain))...); err != nil {<|fim▁hole|> // This set of rules matches broadly (addrtype & destination port), and therefore must come after the portal rules args = []string{"-m", "addrtype", "--dst-type", "LOCAL"} args = append(args, "-m", "comment", "--comment", "handle service NodePorts; NOTE: this must be the last rule in the chain") if _, err := ipt.EnsureChain(iptables.TableNAT, iptablesContainerNodePortChain); err != nil { return err } if _, err := ipt.EnsureRule(iptables.Append, iptables.TableNAT, iptables.ChainPrerouting, append(args, "-j", string(iptablesContainerNodePortChain))...); err != nil { return err } if _, err := ipt.EnsureChain(iptables.TableNAT, iptablesHostNodePortChain); err != nil { return err } if _, err := ipt.EnsureRule(iptables.Append, iptables.TableNAT, iptables.ChainOutput, append(args, "-j", string(iptablesHostNodePortChain))...); err != nil { return err } // TODO: Verify order of rules. return nil } // Flush all of our custom iptables rules. func iptablesFlush(ipt iptables.Interface) error { el := []error{} if err := ipt.FlushChain(iptables.TableNAT, iptablesContainerPortalChain); err != nil { el = append(el, err) } if err := ipt.FlushChain(iptables.TableNAT, iptablesHostPortalChain); err != nil { el = append(el, err) } if err := ipt.FlushChain(iptables.TableNAT, iptablesContainerNodePortChain); err != nil { el = append(el, err) } if err := ipt.FlushChain(iptables.TableNAT, iptablesHostNodePortChain); err != nil { el = append(el, err) } if len(el) != 0 { glog.Errorf("Some errors flushing old iptables portals: %v", el) } return errors.NewAggregate(el) } // Used below. var zeroIPv4 = net.ParseIP("0.0.0.0") var localhostIPv4 = net.ParseIP("127.0.0.1") var zeroIPv6 = net.ParseIP("::0") var localhostIPv6 = net.ParseIP("::1") // Build a slice of iptables args that are common to from-container and from-host portal rules. func iptablesCommonPortalArgs(destIP net.IP, addPhysicalInterfaceMatch bool, addDstLocalMatch bool, destPort int, protocol api.Protocol, service proxy.ServicePortName) []string { // This list needs to include all fields as they are eventually spit out // by iptables-save. This is because some systems do not support the // 'iptables -C' arg, and so fall back on parsing iptables-save output. // If this does not match, it will not pass the check. For example: // adding the /32 on the destination IP arg is not strictly required, // but causes this list to not match the final iptables-save output. // This is fragile and I hope one day we can stop supporting such old // iptables versions. 
args := []string{ "-m", "comment", "--comment", service.String(), "-p", strings.ToLower(string(protocol)), "-m", strings.ToLower(string(protocol)), "--dport", fmt.Sprintf("%d", destPort), } if destIP != nil { args = append(args, "-d", fmt.Sprintf("%s/32", destIP.String())) } if addPhysicalInterfaceMatch { args = append(args, "-m", "physdev", "!", "--physdev-is-in") } if addDstLocalMatch { args = append(args, "-m", "addrtype", "--dst-type", "LOCAL") } return args } // Build a slice of iptables args for a from-container portal rule. func (proxier *Proxier) iptablesContainerPortalArgs(destIP net.IP, addPhysicalInterfaceMatch bool, addDstLocalMatch bool, destPort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, service proxy.ServicePortName) []string { args := iptablesCommonPortalArgs(destIP, addPhysicalInterfaceMatch, addDstLocalMatch, destPort, protocol, service) // This is tricky. // // If the proxy is bound (see Proxier.listenIP) to 0.0.0.0 ("any // interface") we want to use REDIRECT, which sends traffic to the // "primary address of the incoming interface" which means the container // bridge, if there is one. When the response comes, it comes from that // same interface, so the NAT matches and the response packet is // correct. This matters for UDP, since there is no per-connection port // number. // // The alternative would be to use DNAT, except that it doesn't work // (empirically): // * DNAT to 127.0.0.1 = Packets just disappear - this seems to be a // well-known limitation of iptables. // * DNAT to eth0's IP = Response packets come from the bridge, which // breaks the NAT, and makes things like DNS not accept them. If // this could be resolved, it would simplify all of this code. // // If the proxy is bound to a specific IP, then we have to use DNAT to // that IP. Unlike the previous case, this works because the proxy is // ONLY listening on that IP, not the bridge. // // Why would anyone bind to an address that is not inclusive of // localhost? Apparently some cloud environments have their public IP // exposed as a real network interface AND do not have firewalling. We // don't want to expose everything out to the world. // // Unfortunately, I don't know of any way to listen on some (N > 1) // interfaces but not ALL interfaces, short of doing it manually, and // this is simpler than that. // // If the proxy is bound to localhost only, all of this is broken. Not // allowed. if proxyIP.Equal(zeroIPv4) || proxyIP.Equal(zeroIPv6) { // TODO: Can we REDIRECT with IPv6? args = append(args, "-j", "REDIRECT", "--to-ports", fmt.Sprintf("%d", proxyPort)) } else { // TODO: Can we DNAT with IPv6? args = append(args, "-j", "DNAT", "--to-destination", net.JoinHostPort(proxyIP.String(), strconv.Itoa(proxyPort))) } return args } // Build a slice of iptables args for a from-host portal rule. func (proxier *Proxier) iptablesHostPortalArgs(destIP net.IP, addDstLocalMatch bool, destPort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, service proxy.ServicePortName) []string { args := iptablesCommonPortalArgs(destIP, false, addDstLocalMatch, destPort, protocol, service) // This is tricky. // // If the proxy is bound (see Proxier.listenIP) to 0.0.0.0 ("any // interface") we want to do the same as from-container traffic and use // REDIRECT. Except that it doesn't work (empirically). REDIRECT on // localpackets sends the traffic to localhost (special case, but it is // documented) but the response comes from the eth0 IP (not sure why, // truthfully), which makes DNS unhappy. 
// // So we have to use DNAT. DNAT to 127.0.0.1 can't work for the same // reason. // // So we do our best to find an interface that is not a loopback and // DNAT to that. This works (again, empirically). // // If the proxy is bound to a specific IP, then we have to use DNAT to // that IP. Unlike the previous case, this works because the proxy is // ONLY listening on that IP, not the bridge. // // If the proxy is bound to localhost only, this should work, but we // don't allow it for now. if proxyIP.Equal(zeroIPv4) || proxyIP.Equal(zeroIPv6) { proxyIP = proxier.hostIP } // TODO: Can we DNAT with IPv6? args = append(args, "-j", "DNAT", "--to-destination", net.JoinHostPort(proxyIP.String(), strconv.Itoa(proxyPort))) return args } // Build a slice of iptables args for a from-container public-port rule. // See iptablesContainerPortalArgs // TODO: Should we just reuse iptablesContainerPortalArgs? func (proxier *Proxier) iptablesContainerNodePortArgs(nodePort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, service proxy.ServicePortName) []string { args := iptablesCommonPortalArgs(nil, false, false, nodePort, protocol, service) if proxyIP.Equal(zeroIPv4) || proxyIP.Equal(zeroIPv6) { // TODO: Can we REDIRECT with IPv6? args = append(args, "-j", "REDIRECT", "--to-ports", fmt.Sprintf("%d", proxyPort)) } else { // TODO: Can we DNAT with IPv6? args = append(args, "-j", "DNAT", "--to-destination", net.JoinHostPort(proxyIP.String(), strconv.Itoa(proxyPort))) } return args } // Build a slice of iptables args for a from-host public-port rule. // See iptablesHostPortalArgs // TODO: Should we just reuse iptablesHostPortalArgs? func (proxier *Proxier) iptablesHostNodePortArgs(nodePort int, protocol api.Protocol, proxyIP net.IP, proxyPort int, service proxy.ServicePortName) []string { args := iptablesCommonPortalArgs(nil, false, false, nodePort, protocol, service) if proxyIP.Equal(zeroIPv4) || proxyIP.Equal(zeroIPv6) { proxyIP = proxier.hostIP } // TODO: Can we DNAT with IPv6? args = append(args, "-j", "DNAT", "--to-destination", net.JoinHostPort(proxyIP.String(), strconv.Itoa(proxyPort))) return args } func isTooManyFDsError(err error) bool { return strings.Contains(err.Error(), "too many open files") } func isClosedError(err error) bool { // A brief discussion about handling closed error here: // https://code.google.com/p/go/issues/detail?id=4373#c14 // TODO: maybe create a stoppable TCP listener that returns a StoppedError return strings.HasSuffix(err.Error(), "use of closed network connection") }<|fim▁end|>
return err }
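One detail of the portal rules above that is easy to miss: the jump target depends on how the proxy is bound. When it listens on the wildcard address the rule ends in REDIRECT --to-ports, otherwise it DNATs to the specific proxy IP. A hedged Python sketch of just that branch, building an equivalent argument list; it is illustrative only, not the project's code.

```python
def portal_jump_args(proxy_ip, proxy_port):
    # Mirrors the tail of the *PortalArgs helpers: REDIRECT for a proxy bound
    # to the wildcard address, DNAT when it is bound to one concrete IP.
    if proxy_ip in ("0.0.0.0", "::", "::0"):
        return ["-j", "REDIRECT", "--to-ports", str(proxy_port)]
    return ["-j", "DNAT", "--to-destination", f"{proxy_ip}:{proxy_port}"]

assert portal_jump_args("0.0.0.0", 10250) == [
    "-j", "REDIRECT", "--to-ports", "10250"]
assert portal_jump_args("10.0.0.5", 10250) == [
    "-j", "DNAT", "--to-destination", "10.0.0.5:10250"]
```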
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import shutil import tempfile import numpy as np import os from os.path import getsize import pytest import yaml from util import PATH_TO_TESTS, seed, dummy_predict_with_threshold PATH_TO_ASSETS = os.path.join(PATH_TO_TESTS, 'assets') PATH_TO_RETINA_DIR = os.path.join(PATH_TO_ASSETS, 'recordings', 'retina') PATH_TO_RETINA_CONFIG_DIR = os.path.join(PATH_TO_RETINA_DIR, 'config') @pytest.fixture(autouse=True) def setup(): seed(0) @pytest.fixture def patch_triage_network(monkeypatch): to_patch = 'yass.neuralnetwork.model.KerasModel.predict_with_threshold' monkeypatch.setattr(to_patch, dummy_predict_with_threshold) yield def _path_to_config(): return os.path.join(PATH_TO_RETINA_CONFIG_DIR, 'config.yaml')<|fim▁hole|> with open(_path_to_config()) as f: d = yaml.load(f) return d @pytest.fixture() def data_info(): return _data_info() @pytest.fixture() def data(): info = _data_info()['recordings'] path = os.path.join(PATH_TO_RETINA_DIR, 'data.bin') d = np.fromfile(path, dtype=info['dtype']) n_observations = int(getsize(path) / info['n_channels'] / np.dtype(info['dtype']).itemsize) d = d.reshape(n_observations, info['n_channels']) return d @pytest.fixture() def path_to_tests(): return PATH_TO_TESTS @pytest.fixture() def path_to_performance(): return os.path.join(PATH_TO_TESTS, 'performance/') @pytest.fixture def make_tmp_folder(): temp = tempfile.mkdtemp() yield temp shutil.rmtree(temp) @pytest.fixture() def path_to_data(): return os.path.join(PATH_TO_RETINA_DIR, 'data.bin') @pytest.fixture() def path_to_geometry(): return os.path.join(PATH_TO_RETINA_DIR, 'geometry.npy') @pytest.fixture() def path_to_sample_pipeline_folder(): return os.path.join(PATH_TO_RETINA_DIR, 'sample_pipeline_output') @pytest.fixture() def path_to_standardized_data(): return os.path.join(PATH_TO_RETINA_DIR, 'sample_pipeline_output', 'preprocess', 'standardized.bin') @pytest.fixture() def path_to_output_reference(): return os.path.join(PATH_TO_ASSETS, 'output_reference') @pytest.fixture def path_to_config(): return _path_to_config() @pytest.fixture def path_to_config_threshold(): return os.path.join(PATH_TO_RETINA_CONFIG_DIR, 'config_threshold.yaml') @pytest.fixture def path_to_config_with_wrong_channels(): return os.path.join(PATH_TO_RETINA_CONFIG_DIR, 'wrong_channels.yaml') @pytest.fixture def path_to_txt_geometry(): return os.path.join(PATH_TO_ASSETS, 'test_files', 'geometry.txt') @pytest.fixture def path_to_npy_geometry(): return os.path.join(PATH_TO_ASSETS, 'test_files', 'geometry.npy')<|fim▁end|>
def _data_info():
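A hedged sketch of how a test module next to the conftest above would consume those fixtures: pytest injects them by parameter name, `data` arrives already reshaped to (observations, channels), and `make_tmp_folder` yields a directory that is removed after the test. The test body itself is hypothetical.

```python
import os
import numpy as np

def test_data_matches_geometry(data, path_to_geometry, make_tmp_folder):
    geometry = np.load(path_to_geometry)
    # One sample column per recording channel (assumes geometry.npy holds
    # one row per channel, which is the usual layout but not verified here).
    assert data.shape[1] == geometry.shape[0]
    # The temporary folder exists for the duration of the test.
    assert os.path.isdir(make_tmp_folder)
```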
<|file_name|>pareto_node.hpp<|end_file_name|><|fim▁begin|>#ifndef PARETO_NODE_HPP #define PARETO_NODE_HPP #include <core/forward_sampleable_node.hpp> #include <helper/node_counter.hpp> #include <string><|fim▁hole|> namespace mcmc_utilities { template <typename T,template <typename TE> class T_vector> class pareto_node :public forward_sampleable_node<T,T_vector> { private: public: pareto_node() :forward_sampleable_node<T,T_vector>(2,0) {} private: T do_log_prob()const override { T x=this->value(0); T c=this->parent(0); T alpha=this->parent(1); return logdpar<T,T>(x,c,alpha); } bool is_continuous(size_t)const override { return true; } std::pair<T,T> do_var_range()const override { T c=this->parent(0); T alpha=this->parent(1); T eta=1e-4; return std::make_pair(c,c*std::pow(eta,-(T)(1.)/alpha)); } void do_init_value(size_t n) override { this->set_value(0,this->parent(0)); } std::shared_ptr<node<T,T_vector> > do_clone()const override { auto p=new pareto_node; for(size_t i=0;i<this->num_of_dims();++i) { p->set_observed(i,this->is_observed(i)); p->set_value(i,this->value(i)); } return std::shared_ptr<node<T,T_vector> >(p); } }; template <typename T,template <typename TE> class T_vector> class pareto_node_factory :public abstract_node_factory<T,T_vector> { public: pareto_node_factory() :abstract_node_factory<T,T_vector>({"mu","sigma"},{"x"},{}) {} public: std::shared_ptr<node<T,T_vector> > do_get_node(const T_vector<T>& hparam)const override { return std::shared_ptr<node<T,T_vector> >(new pareto_node<T,T_vector>); } std::string do_get_node_type()const override { return std::string("stochastic node"); } }; } #endif<|fim▁end|>
#include <helper/abstract_node_factory.hpp> #include <math/distributions.hpp>
<|file_name|>bitcoin-wallet.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2016-2020 The Bitcoin Core developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #if defined(HAVE_CONFIG_H) #include <config/bitcoin-config.h> #endif #include <chainparams.h> #include <chainparamsbase.h> #include <logging.h> #include <util/system.h> #include <util/translation.h> #include <util/url.h> #include <wallet/wallettool.h> #include <functional> const std::function<std::string(const char*)> G_TRANSLATION_FUN = nullptr; UrlDecodeFn* const URL_DECODE = nullptr; static void SetupWalletToolArgs(ArgsManager& argsman) { SetupHelpOptions(argsman); SetupChainParamsBaseOptions(argsman); argsman.AddArg("-datadir=<dir>", "Specify data directory", ArgsManager::ALLOW_ANY, OptionsCategory::OPTIONS); argsman.AddArg("-wallet=<wallet-name>", "Specify wallet name", ArgsManager::ALLOW_ANY | ArgsManager::NETWORK_ONLY, OptionsCategory::OPTIONS); argsman.AddArg("-debug=<category>", "Output debugging information (default: 0).", ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST); argsman.AddArg("-printtoconsole", "Send trace/debug info to console (default: 1 when no -debug is true, 0 otherwise).", ArgsManager::ALLOW_ANY, OptionsCategory::DEBUG_TEST); argsman.AddArg("info", "Get wallet info", ArgsManager::ALLOW_ANY, OptionsCategory::COMMANDS); argsman.AddArg("create", "Create new wallet file", ArgsManager::ALLOW_ANY, OptionsCategory::COMMANDS); argsman.AddArg("salvage", "Attempt to recover private keys from a corrupt wallet. Warning: 'salvage' is experimental.", ArgsManager::ALLOW_ANY, OptionsCategory::COMMANDS); } static bool WalletAppInit(int argc, char* argv[]) { SetupWalletToolArgs(gArgs); std::string error_message; if (!gArgs.ParseParameters(argc, argv, error_message)) { tfm::format(std::cerr, "Error parsing command line arguments: %s\n", error_message); return false; } if (argc < 2 || HelpRequested(gArgs)) { std::string usage = strprintf("%s elements-wallet version", PACKAGE_NAME) + " " + FormatFullVersion() + "\n\n" + "elements-wallet is an offline tool for creating and interacting with " PACKAGE_NAME " wallet files.\n" + "By default elements-wallet will act on wallets in the default mainnet wallet directory in the datadir.\n" +<|fim▁hole|> "To change the target wallet, use the -datadir, -wallet and -testnet/-regtest arguments.\n\n" + "Usage:\n" + " elements-wallet [options] <command>\n\n" + gArgs.GetHelpMessage(); tfm::format(std::cout, "%s", usage); return false; } // check for printtoconsole, allow -debug LogInstance().m_print_to_console = gArgs.GetBoolArg("-printtoconsole", gArgs.GetBoolArg("-debug", false)); if (!CheckDataDirOption()) { tfm::format(std::cerr, "Error: Specified data directory \"%s\" does not exist.\n", gArgs.GetArg("-datadir", "")); return false; } // Check for chain settings (Params() calls are only valid after this clause) SelectParams(gArgs.GetChainName()); return true; } int main(int argc, char* argv[]) { #ifdef WIN32 util::WinCmdLineArgs winArgs; std::tie(argc, argv) = winArgs.get(); #endif SetupEnvironment(); RandomInit(); try { if (!WalletAppInit(argc, argv)) return EXIT_FAILURE; } catch (const std::exception& e) { PrintExceptionContinue(&e, "WalletAppInit()"); return EXIT_FAILURE; } catch (...) 
{ PrintExceptionContinue(nullptr, "WalletAppInit()"); return EXIT_FAILURE; } std::string method {}; for(int i = 1; i < argc; ++i) { if (!IsSwitchChar(argv[i][0])) { if (!method.empty()) { tfm::format(std::cerr, "Error: two methods provided (%s and %s). Only one method should be provided.\n", method, argv[i]); return EXIT_FAILURE; } method = argv[i]; } } if (method.empty()) { tfm::format(std::cerr, "No method provided. Run `elements-wallet -help` for valid methods.\n"); return EXIT_FAILURE; } // A name must be provided when creating a file if (method == "create" && !gArgs.IsArgSet("-wallet")) { tfm::format(std::cerr, "Wallet name must be provided when creating a new wallet.\n"); return EXIT_FAILURE; } std::string name = gArgs.GetArg("-wallet", ""); ECCVerifyHandle globalVerifyHandle; ECC_Start(); if (!WalletTool::ExecuteWalletToolFunc(method, name)) return EXIT_FAILURE; ECC_Stop(); return EXIT_SUCCESS; }<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from setuptools import setup, find_packages<|fim▁hole|>setup(name='conwhat', #version=versioneer.get_version(), description='python library for connectome-based white matter atlas analyses in neuroimaging', long_description='python library for connectome-based white matter atlas analyses in neuroimaging', keywords='white matter, tractography, MRI, DTI, diffusion, python', author='John David Griffiths', author_email='[email protected]', url='https://github.com/JohnGriffiths/conwhat', packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]), install_requires=['numpy', 'setuptools'], classifiers=[ 'Intended Audience :: Science/Research', 'Programming Language :: Python', 'Topic :: Software Development', 'Topic :: Scientific/Engineering', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: Unix', 'Operating System :: MacOS', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', ], entry_points={ "console_scripts": [ "conwhat=conwhat.__main__:main", ] }, #cmdclass=versioneer.get_cmdclass() )<|fim▁end|>
import versioneer
<|file_name|>sha1.rs<|end_file_name|><|fim▁begin|>use std::num::Wrapping; use byteorder::ByteOrder; use byteorder::ReadBytesExt; use byteorder::WriteBytesExt; use byteorder::BigEndian as BE; static K1: u32 = 0x5A827999u32; static K2: u32 = 0x6ED9EBA1u32; static K3: u32 = 0x8F1BBCDCu32; static K4: u32 = 0xCA62C1D6u32; #[inline] fn circular_shift(bits: u32, Wrapping(word): Wrapping<u32>) -> u32 { word << (bits as usize) | word >> ((32u32 - bits) as usize) } #[allow(unused_must_use)] pub fn sha1(message: &[u8]) -> Vec<u8> { let mut hash: [u32; 5] = [0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476,<|fim▁hole|> 0xC3D2E1F0]; let mut msg = message.to_vec(); let msg_bit_len = msg.len() * 8; let offset = (msg.len() * 8) % 512; if offset < 448 { msg.push(128u8); for _ in 0..(448 - (offset + 8)) / 8 { msg.push(0u8); } } else if offset >= 448 { msg.push(128u8); for _ in 0..(512 - (offset + 8)) / 8 + 56 { msg.push(0u8); } } msg.write_u64::<BE>(msg_bit_len as u64); for i in 0..(msg.len() * 8 / 512) { let mut w = [0u32; 80]; let part = &msg[i * 64..(i+1) * 64]; { let mut reader = &part[..]; for j in 0usize..16 { w[j] = reader.read_u32::<BE>().unwrap(); } } for j in 16usize..80 { let val = w[j - 3] ^ w[j - 8] ^ w[j - 14] ^ w[j - 16]; w[j] = circular_shift(1, Wrapping(val)); } let mut a = Wrapping(hash[0]); let mut b = Wrapping(hash[1]); let mut c = Wrapping(hash[2]); let mut d = Wrapping(hash[3]); let mut e = Wrapping(hash[4]); let mut temp: Wrapping<u32>; for t in 0usize..20 { temp = Wrapping(circular_shift(5, a)) + (b & c | !b & d) + e + Wrapping(w[t]) + Wrapping(K1); e = d; d = c; c = Wrapping(circular_shift(30, b)); b = a; a = temp; } for t in 20usize..40 { temp = Wrapping(circular_shift(5, a)) + (b ^ c ^ d) + e + Wrapping(w[t]) + Wrapping(K2); e = d; d = c; c = Wrapping(circular_shift(30, b)); b = a; a = temp; } for t in 40usize..60 { temp = Wrapping(circular_shift(5, a)) + (b & c | b & d | c & d) + e + Wrapping(w[t]) + Wrapping(K3); e = d; d = c; c = Wrapping(circular_shift(30, b)); b = a; a = temp; } for t in 60usize..80 { temp = Wrapping(circular_shift(5, a)) + (b ^ c ^ d) + e + Wrapping(w[t]) + Wrapping(K4); e = d; d = c; c = Wrapping(circular_shift(30, b)); b = a; a = temp; } hash[0] = { let Wrapping(x) = Wrapping(hash[0]) + a; x}; hash[1] = { let Wrapping(x) = Wrapping(hash[1]) + b; x}; hash[2] = { let Wrapping(x) = Wrapping(hash[2]) + c; x}; hash[3] = { let Wrapping(x) = Wrapping(hash[3]) + d; x}; hash[4] = { let Wrapping(x) = Wrapping(hash[4]) + e; x}; } let mut output = Vec::with_capacity(20); output.write_u32::<BE>(hash[0]); output.write_u32::<BE>(hash[1]); output.write_u32::<BE>(hash[2]); output.write_u32::<BE>(hash[3]); output.write_u32::<BE>(hash[4]); output } #[cfg(test)] mod test { pub use super::sha1; #[test] fn should_compute_sha1_hash() { assert_eq!(sha1(&[115u8, 104u8, 97u8]), vec![0xd8u8, 0xf4u8, 0x59u8, 0x03u8, 0x20u8, 0xe1u8, 0x34u8, 0x3au8, 0x91u8, 0x5bu8, 0x63u8, 0x94u8, 0x17u8, 0x06u8, 0x50u8, 0xa8u8, 0xf3u8, 0x5du8, 0x69u8, 0x26u8]); assert_eq!(sha1(&[65u8; 57]), vec![0xe8u8, 0xd6u8, 0xeau8, 0x5cu8, 0x62u8, 0x7fu8, 0xc8u8, 0x67u8, 0x6fu8, 0xa6u8, 0x62u8, 0x67u8, 0x7bu8, 0x02u8, 0x86u8, 0x40u8, 0x84u8, 0x4du8, 0xc3u8, 0x5cu8]); assert_eq!(sha1(&[65u8; 56]), vec![0x6bu8, 0x45u8, 0xe3u8, 0xcfu8, 0x1eu8, 0xb3u8, 0x32u8, 0x4bu8, 0x9fu8, 0xd4u8, 0xdfu8, 0x3bu8, 0x83u8, 0xd8u8, 0x9cu8, 0x4cu8, 0x2cu8, 0x4cu8, 0xa8u8, 0x96u8]); assert_eq!(sha1(&[65u8; 64]), vec![0x30u8, 0xb8u8, 0x6eu8, 0x44u8, 0xe6u8, 0x00u8, 0x14u8, 0x03u8, 0x82u8, 0x7au8, 0x62u8, 0xc5u8, 0x8bu8, 0x08u8, 0x89u8, 
0x3eu8, 0x77u8, 0xcfu8, 0x12u8, 0x1fu8]); assert_eq!(sha1(&[65u8; 65]), vec![0x82u8, 0x6bu8, 0x7eu8, 0x7au8, 0x7au8, 0xf8u8, 0xa5u8, 0x29u8, 0xaeu8, 0x1cu8, 0x74u8, 0x43u8, 0xc2u8, 0x3bu8, 0xf1u8, 0x85u8, 0xc0u8, 0xadu8, 0x44u8, 0x0cu8]); } }<|fim▁end|>
<|file_name|>constants_darwin_64.go<|end_file_name|><|fim▁begin|>// // Copyright 2014-2017 Cristian Maglie. All rights reserved. // Use of this source code is governed by a BSD-style license // that can be found in the LICENSE file. // //go:build darwin && (amd64 || arm64) // +build darwin // +build amd64 arm64 package serial import "golang.org/x/sys/unix" // termios manipulation functions <|fim▁hole|> 0: unix.B9600, // Default to 9600 50: unix.B50, 75: unix.B75, 110: unix.B110, 134: unix.B134, 150: unix.B150, 200: unix.B200, 300: unix.B300, 600: unix.B600, 1200: unix.B1200, 1800: unix.B1800, 2400: unix.B2400, 4800: unix.B4800, 9600: unix.B9600, 19200: unix.B19200, 38400: unix.B38400, 57600: unix.B57600, 115200: unix.B115200, 230400: unix.B230400, } var databitsMap = map[int]uint64{ 0: unix.CS8, // Default to 8 bits 5: unix.CS5, 6: unix.CS6, 7: unix.CS7, 8: unix.CS8, } const tcCMSPAR uint64 = 0 // may be CMSPAR or PAREXT const tcIUCLC uint64 = 0 const tcCCTS_OFLOW uint64 = 0x00010000 const tcCRTS_IFLOW uint64 = 0x00020000 const tcCRTSCTS = tcCCTS_OFLOW | tcCRTS_IFLOW func toTermiosSpeedType(speed uint64) uint64 { return speed }<|fim▁end|>
var baudrateMap = map[int]uint64{
<|file_name|>su-popup.spec.js<|end_file_name|><|fim▁begin|>import * as riot from 'riot' import { init, compile } from '../../helpers/' import TargetComponent from '../../../dist/tags/popup/su-popup.js' describe('su-popup', function () { let element, component let spyOnMouseover, spyOnMouseout init(riot) const mount = opts => { const option = Object.assign({ 'onmouseover': spyOnMouseover, 'onmouseout': spyOnMouseout, }, opts) element = document.createElement('app') riot.register('su-popup', TargetComponent) const AppComponent = compile(` <app> <su-popup tooltip="{ props.tooltip }" data-title="{ props.dataTitle }" data-variation="{ props.dataVariation }" onmouseover="{ () => dispatch('mouseover') }" onmouseout="{ () => dispatch('mouseout') }" ><i class="add icon"></i></su-popup> </app>`) riot.register('app', AppComponent) component = riot.mount(element, option)[0] } beforeEach(function () { spyOnMouseover = sinon.spy() spyOnMouseout = sinon.spy() }) afterEach(function () {<|fim▁hole|> riot.unregister('app') }) it('is mounted', function () { mount() expect(component).to.be.ok }) it('show and hide popup', function () { mount({ tooltip: 'Add users to your feed' }) expect(component.$('.content').innerHTML).to.equal('Add users to your feed') expect(component.$('su-popup .ui.popup').classList.contains('nowrap')).to.equal(true) fireEvent(component.$('su-popup .ui.popup'), 'mouseover') expect(spyOnMouseover).to.have.been.calledOnce expect(component.$('su-popup .ui.popup').classList.contains('visible')).to.equal(true) expect(component.$('su-popup .ui.popup').classList.contains('hidden')).to.equal(false) fireEvent(component.$('su-popup .ui.popup'), 'mouseout') expect(spyOnMouseout).to.have.been.calledOnce expect(component.$('su-popup .ui.popup').classList.contains('visible')).to.equal(false) expect(component.$('su-popup .ui.popup').classList.contains('hidden')).to.equal(true) }) it('header', function () { mount({ tooltip: 'Add users to your feed', dataTitle: 'Title' }) expect(component.$('.header').innerHTML).to.equal('Title') expect(component.$('.content').innerHTML).to.equal('Add users to your feed') }) it('wide', function () { mount({ tooltip: 'Add users to your feed', dataVariation: 'wide' }) expect(component.$('su-popup .ui.popup').classList.contains('wide')).to.equal(true) expect(component.$('su-popup .ui.popup').classList.contains('nowrap')).to.equal(false) }) })<|fim▁end|>
riot.unregister('su-popup')
<|file_name|>self_assessment_module.py<|end_file_name|><|fim▁begin|>import json import logging from lxml import etree from xmodule.capa_module import ComplexEncoder from xmodule.progress import Progress from xmodule.stringify import stringify_children import openendedchild from .combined_open_ended_rubric import CombinedOpenEndedRubric log = logging.getLogger("edx.courseware") class SelfAssessmentModule(openendedchild.OpenEndedChild): """ A Self Assessment module that allows students to write open-ended responses, submit, then see a rubric and rate themselves. Persists student supplied hints, answers, and assessment judgment (currently only correct/incorrect). Parses xml definition file--see below for exact format. Sample XML format: <selfassessment> <hintprompt> What hint about this problem would you give to someone? </hintprompt> <submitmessage> Save Succcesful. Thanks for participating! </submitmessage> </selfassessment> """ TEMPLATE_DIR = "combinedopenended/selfassessment" # states INITIAL = 'initial' ASSESSING = 'assessing' REQUEST_HINT = 'request_hint' DONE = 'done' def setup_response(self, system, location, definition, descriptor): """ Sets up the module @param system: Modulesystem @param location: location, to let the module know where it is. @param definition: XML definition of the module. @param descriptor: SelfAssessmentDescriptor @return: None """ self.child_prompt = stringify_children(self.child_prompt) self.child_rubric = stringify_children(self.child_rubric) def get_html(self, system): """ Gets context and renders HTML that represents the module @param system: Modulesystem @return: Rendered HTML """ # set context variables and render template previous_answer = self.get_display_answer() # Use the module name as a unique id to pass to the template. try: module_id = self.system.location.name except AttributeError: # In cases where we don't have a system or a location, use a fallback. module_id = "self_assessment" context = { 'prompt': self.child_prompt, 'previous_answer': previous_answer, 'ajax_url': system.ajax_url, 'initial_rubric': self.get_rubric_html(system), 'state': self.child_state, 'allow_reset': self._allow_reset(), 'child_type': 'selfassessment', 'accept_file_upload': self.accept_file_upload, 'module_id': module_id, } html = system.render_template('{0}/self_assessment_prompt.html'.format(self.TEMPLATE_DIR), context) return html def handle_ajax(self, dispatch, data, system): """ This is called by courseware.module_render, to handle an AJAX call. "data" is request.POST. Returns a json dictionary: { 'progress_changed' : True/False, 'progress': 'none'/'in_progress'/'done', <other request-specific values here > } """ handlers = { 'save_answer': self.save_answer, 'save_assessment': self.save_assessment, 'save_post_assessment': self.save_hint, 'store_answer': self.store_answer, } if dispatch not in handlers: # This is a dev_facing_error log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) # This is a dev_facing_error return json.dumps({'error': 'Error handling action. Please try again.', 'success': False}) before = self.get_progress() d = handlers[dispatch](data, system) after = self.get_progress() d.update({ 'progress_changed': after != before, 'progress_status': Progress.to_js_status_str(after),<|fim▁hole|> return json.dumps(d, cls=ComplexEncoder) def get_rubric_html(self, system): """ Return the appropriate version of the rubric, based on the state. 
""" if self.child_state == self.INITIAL: return '' rubric_renderer = CombinedOpenEndedRubric(system, False) rubric_dict = rubric_renderer.render_rubric(self.child_rubric) success = rubric_dict['success'] rubric_html = rubric_dict['html'] # we'll render it context = { 'rubric': rubric_html, 'max_score': self._max_score, } if self.child_state == self.ASSESSING: context['read_only'] = False elif self.child_state in (self.POST_ASSESSMENT, self.DONE): context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_rubric.html'.format(self.TEMPLATE_DIR), context) def get_hint_html(self, system): """ Return the appropriate version of the hint view, based on state. """ if self.child_state in (self.INITIAL, self.ASSESSING): return '' if self.child_state == self.DONE: # display the previous hint latest = self.latest_post_assessment(system) hint = latest if latest is not None else '' else: hint = '' context = {'hint': hint} if self.child_state == self.POST_ASSESSMENT: context['read_only'] = False elif self.child_state == self.DONE: context['read_only'] = True else: # This is a dev_facing_error raise ValueError("Self Assessment module is in an illegal state '{0}'".format(self.child_state)) return system.render_template('{0}/self_assessment_hint.html'.format(self.TEMPLATE_DIR), context) def save_answer(self, data, system): """ After the answer is submitted, show the rubric. Args: data: the request dictionary passed to the ajax request. Should contain a key 'student_answer' Returns: Dictionary with keys 'success' and either 'error' (if not success), or 'rubric_html' (if success). """ # Check to see if this problem is closed closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.INITIAL: return self.out_of_sync_error(data) error_message = "" # add new history element with answer and empty score and hint. success, error_message, data = self.append_file_link_to_student_answer(data) if success: data['student_answer'] = SelfAssessmentModule.sanitize_html(data['student_answer']) self.new_history_entry(data['student_answer']) self.change_state(self.ASSESSING) return { 'success': success, 'rubric_html': self.get_rubric_html(system), 'error': error_message, 'student_response': data['student_answer'].replace("\n","<br/>") } def save_assessment(self, data, _system): """ Save the assessment. If the student said they're right, don't ask for a hint, and go straight to the done state. Otherwise, do ask for a hint. 
Returns a dict { 'success': bool, 'state': state, 'hint_html': hint_html OR 'message_html': html and 'allow_reset', 'error': error-msg}, with 'error' only present if 'success' is False, and 'hint_html' or 'message_html' only if success is true :param data: A `webob.multidict.MultiDict` containing the keys asasssment: The sum of assessment scores score_list[]: A multivalue key containing all the individual scores """ closed, msg = self.check_if_closed() if closed: return msg if self.child_state != self.ASSESSING: return self.out_of_sync_error(data) try: score = int(data.get('assessment')) score_list = [int(x) for x in data.getall('score_list[]')] except (ValueError, TypeError): # This is a dev_facing_error log.error("Non-integer score value passed to save_assessment, or no score list present.") # This is a student_facing_error _ = self.system.service(self, "i18n").ugettext return { 'success': False, 'error': _("Error saving your score. Please notify course staff.") } # Record score as assessment and rubric scores as post assessment self.record_latest_score(score) self.record_latest_post_assessment(json.dumps(score_list)) d = {'success': True, } self.change_state(self.DONE) d['allow_reset'] = self._allow_reset() d['state'] = self.child_state return d def save_hint(self, data, _system): ''' Not used currently, as hints have been removed from the system. Save the hint. Returns a dict { 'success': bool, 'message_html': message_html, 'error': error-msg, 'allow_reset': bool}, with the error key only present if success is False and message_html only if True. ''' if self.child_state != self.POST_ASSESSMENT: # Note: because we only ask for hints on wrong answers, may not have # the same number of hints and answers. return self.out_of_sync_error(data) self.record_latest_post_assessment(data['hint']) self.change_state(self.DONE) return { 'success': True, 'message_html': '', 'allow_reset': self._allow_reset(), } def latest_post_assessment(self, system): latest_post_assessment = super(SelfAssessmentModule, self).latest_post_assessment(system) try: rubric_scores = json.loads(latest_post_assessment) except: rubric_scores = [] return [rubric_scores] class SelfAssessmentDescriptor(): """ Module for adding self assessment questions to courses """ mako_template = "widgets/html-edit.html" module_class = SelfAssessmentModule filename_extension = "xml" has_score = True def __init__(self, system): self.system = system @classmethod def definition_from_xml(cls, xml_object, system): """ Pull out the rubric, prompt, and submitmessage into a dictionary. Returns: { 'submitmessage': 'some-html' 'hintprompt': 'some-html' } """ expected_children = [] for child in expected_children: if len(xml_object.xpath(child)) != 1: # This is a staff_facing_error raise ValueError( u"Self assessment definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format( child)) def parse(k): """Assumes that xml_object has child k""" return stringify_children(xml_object.xpath(k)[0]) return {} def definition_to_xml(self, resource_fs): '''Return an xml element representing this definition.''' elt = etree.Element('selfassessment') def add_child(k): child_str = u'<{tag}>{body}</{tag}>'.format(tag=k, body=getattr(self, k)) child_node = etree.fromstring(child_str) elt.append(child_node) for child in []: add_child(child) return elt<|fim▁end|>
})
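A small sketch of the request payload shape that save_assessment() in the module above expects; the keys ('assessment', 'score_list[]') and the MultiDict container are taken from its docstring and body, while the concrete score values are assumptions.

from webob.multidict import MultiDict

payload = MultiDict()
payload.add('assessment', '3')        # sum of the rubric scores
payload.add('score_list[]', '1')      # one entry per rubric category
payload.add('score_list[]', '2')

# mirrors the parsing done inside save_assessment()
score = int(payload.get('assessment'))                          # -> 3
score_list = [int(x) for x in payload.getall('score_list[]')]   # -> [1, 2]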
<|file_name|>0007_auto__add_messagerecord.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'MessageRecord' db.create_table(u'nuntium_messagerecord', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('status', self.gf('django.db.models.fields.CharField')(max_length=255)), ('datetime', self.gf('django.db.models.fields.DateField')(default=datetime.datetime(2013, 4, 24, 0, 0))), ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])), ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()), )) db.send_create_signal(u'nuntium', ['MessageRecord']) def backwards(self, orm): # Deleting model 'MessageRecord' db.delete_table(u'nuntium_messagerecord') models = { u'contactos.contact': { 'Meta': {'object_name': 'Contact'}, 'contact_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contactos.ContactType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.Person']"}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '512'}) }, u'contactos.contacttype': { 'Meta': {'object_name': 'ContactType'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'label_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})<|fim▁hole|> 'content': ('django.db.models.fields.TextField', [], {}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': "'4'"}), 'subject': ('django.db.models.fields.CharField', [], {'max_length': '512'}), 'writeitinstance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['nuntium.WriteItInstance']"}) }, u'nuntium.messagerecord': { 'Meta': {'object_name': 'MessageRecord'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), 'datetime': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 24, 0, 0)'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}), 'status': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, u'nuntium.outboundmessage': { 'Meta': {'object_name': 'OutboundMessage'}, 'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contactos.Contact']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['nuntium.Message']"}) }, u'nuntium.writeitinstance': { 'Meta': {'object_name': 'WriteItInstance'}, 'api_instance': 
('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.ApiInstance']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'slug': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, u'popit.apiinstance': { 'Meta': {'object_name': 'ApiInstance'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'url': ('popit.fields.ApiInstanceURLField', [], {'unique': 'True', 'max_length': '200'}) }, u'popit.person': { 'Meta': {'object_name': 'Person'}, 'api_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['popit.ApiInstance']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'popit_url': ('popit.fields.PopItURLField', [], {'default': "''", 'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}) } } complete_apps = ['nuntium']<|fim▁end|>
}, u'nuntium.message': { 'Meta': {'object_name': 'Message'},
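A sketch of the Django model implied by the frozen ORM definition in the South migration above; the field names and types are read from the migration itself, while the import style and the omitted default value are assumptions for a South-era (pre-1.7) Django code base.

from django.db import models
from django.contrib.contenttypes.models import ContentType

class MessageRecord(models.Model):
    # mirrors the nuntium_messagerecord table created by db.create_table above
    status = models.CharField(max_length=255)
    datetime = models.DateField()               # migration sets a fixed default at creation time
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()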
<|file_name|>utils.spec.ts<|end_file_name|><|fim▁begin|>import * as utils from '../utils'; <|fim▁hole|><|fim▁end|>
// Test Utils Functions
<|file_name|>Department.java<|end_file_name|><|fim▁begin|>package org.softlang.service.company; import java.io.Serializable; import java.util.LinkedList; import java.util.List; /** * A department has a name, a manager, employees, and subdepartments. */ public class Department implements Serializable { private static final long serialVersionUID = -2008895922177165250L; private String name; private Employee manager; private List<Department> subdepts = new LinkedList<Department>(); private List<Employee> employees = new LinkedList<Employee>(); public String getName() { return name; } public void setName(String name) { this.name = name; } public Employee getManager() { return manager; } public void setManager(Employee manager) { this.manager = manager; } public List<Department> getSubdepts() { return subdepts; } public List<Employee> getEmployees() { return employees; } public double total() { double total = 0; total += getManager().getSalary(); for (Department s : getSubdepts()) total += s.total(); for (Employee e : getEmployees()) total += e.getSalary(); return total; } public void cut() { getManager().cut(); for (Department s : getSubdepts()) s.cut(); for (Employee e : getEmployees())<|fim▁hole|> e.cut(); } }<|fim▁end|>
<|file_name|>contact-group.js<|end_file_name|><|fim▁begin|><|fim▁hole|> function (Model) { var ContactGroup = GO.extend(Model, function () { //rename function because this record has a delete attribute on the server this.deleteRecord = this.delete; this.$parent.constructor.call(this, arguments); }); ContactGroup.prototype.getStoreRoute = function () { return 'contacts/'+this.contactId+'/permissions'; }; ContactGroup.prototype.$keys = ['groupId']; return ContactGroup; }]);<|fim▁end|>
'use strict'; angular.module('GO.Modules.GroupOffice.Contacts').factory('GO.Modules.GroupOffice.Contacts.Model.ContactGroup', [ 'GO.Core.Factories.Data.Model',
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>/*istanbul ignore next*/'use strict'; var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var _request2 = require('request'); var _request3 = _interopRequireDefault(_request2); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } /** * A set of utlity methods. * @abstract */ var EgoJSUtils = (function () { function EgoJSUtils() { _classCallCheck(this, EgoJSUtils); } _createClass(EgoJSUtils, null, [{ key: '_fullfilledPromise', /** * Returns an already fullfilled promise with a given value. * @param {bollean} success - Whether to call `resolve` or `reject`. * @param {*} response - The object to resolve or reject. * @return {Promise<*,*>} * @private * @ignore */ value: function _fullfilledPromise(success, response) { return new Promise(function (resolve, reject) { if (success) { resolve(response); } else { reject(response); } }); } /** * Returns an already rejected promise. * @example * EgoJSUtils.rejectedPromise('error message').catch((e) => { * // It will log 'error message' * console.log(e); * }); * * @param {*} response - The object to send to the `.catch` method. * @return {Promise<null, *>} This promise won't call `.then` but `.catch` directly. */ }, { key: 'rejectedPromise', value: function rejectedPromise(response) { return this._fullfilledPromise(false, response); } /** * Returns an already resolved promise. * @example * EgoJSUtils.rejectedPromise('hello world').then((message) => { * // It will log 'hello world' * console.log(message); * }); * * @param {*} response - The object to send to the `.then` method. * @return {Promise<*, null>} This promise won't call `.catch`. */ }, { key: 'resolvedPromise', value: function resolvedPromise(response) { return this._fullfilledPromise(true, response); } /** * It will merge a given list of Objects into a new one. It works recursively, so any "sub * objects" will also be merged. This method returns a new Object, so none of the targets will * be modified. * @example * const a = { * b: 'c', * d: { * e: 'f', * g: { * h: ['i'], * }, * }, * j: 'k', * }; * const b = { * j: 'key', * d: { * g: { * h: ['x', 'y', 'z'], * l: 'm', * }, * }, * }; * // The result will be * // { * // b: 'c', * // d: { * // e: 'f', * // g: { * // h: ['x', 'y', 'z'], * // l: 'm', * // }, * // }, * // j: 'key', * // } * ._mergeObjects(a, b); * * @param {...Object} objects - The list of objects to merge. * @return {Object} A new object with the merged properties. 
*/ }, { key: 'mergeObjects', value: function mergeObjects() { /*istanbul ignore next*/ var _this = this; var result = {}; /*istanbul ignore next*/ for (var _len = arguments.length, objects = Array(_len), _key = 0; _key < _len; _key++) { objects[_key] = arguments[_key]; } objects.forEach(function (obj) { if (typeof obj !== 'undefined') { Object.keys(obj).forEach(function (objKey) { var current = obj[objKey]; var target = result[objKey]; if (typeof target !== 'undefined' && current.constructor === Object && target.constructor === Object) { result[objKey] = /*istanbul ignore next*/_this.mergeObjects(target, current); } else { result[objKey] = current; } }, /*istanbul ignore next*/_this); } }, this); return result; } /** * Wraps a Request call into a Promise. * @example * request({uri: 'https://homer0.com/rosario'}) * .then((response) => doSomething(response)) * .catch((err) => handleErrors(err)); * * @param {Object} data The request settings. The same you would use with request(). * @return {Promise<Object, Error>} It will be resolved or rejected depending on the response. */ }, { key: 'request', value: function request(data) { return new Promise(function (resolve, reject) { /*istanbul ignore next*/(0, _request3.default)(data, function (err, httpResponse, body) { if (err) { reject(err); } else { resolve(body); } }); }); } }]);<|fim▁hole|>/*istanbul ignore next*/exports.default = EgoJSUtils;<|fim▁end|>
return EgoJSUtils; })();
<|file_name|>yahoo.py<|end_file_name|><|fim▁begin|># coding: utf-8 from __future__ import unicode_literals import itertools import json import re from .common import InfoExtractor, SearchInfoExtractor from ..compat import ( compat_urllib_parse, compat_urlparse, ) from ..utils import ( clean_html, determine_ext, ExtractorError, extract_attributes, int_or_none, mimetype2ext, smuggle_url, unescapeHTML, ) from .brightcove import ( BrightcoveLegacyIE, BrightcoveNewIE, ) from .nbc import NBCSportsVPlayerIE class YahooIE(InfoExtractor): IE_DESC = 'Yahoo screen and movies' _VALID_URL = r'(?P<host>https?://(?:(?P<country>[a-zA-Z]{2})\.)?[\da-zA-Z_-]+\.yahoo\.com)/(?:[^/]+/)*(?:(?P<display_id>.+)?-)?(?P<id>[0-9]+)(?:-[a-z]+)?(?:\.html)?' _TESTS = [ { 'url': 'http://screen.yahoo.com/julian-smith-travis-legg-watch-214727115.html', 'info_dict': { 'id': '2d25e626-2378-391f-ada0-ddaf1417e588', 'ext': 'mp4', 'title': 'Julian Smith & Travis Legg Watch Julian Smith', 'description': 'Julian and Travis watch Julian Smith', 'duration': 6863, }, }, { 'url': 'http://screen.yahoo.com/wired/codefellas-s1-ep12-cougar-lies-103000935.html', 'md5': '251af144a19ebc4a033e8ba91ac726bb', 'info_dict': { 'id': 'd1dedf8c-d58c-38c3-8963-e899929ae0a9', 'ext': 'mp4', 'title': 'Codefellas - The Cougar Lies with Spanish Moss', 'description': 'md5:66b627ab0a282b26352136ca96ce73c1', 'duration': 151, }, 'skip': 'HTTP Error 404', }, { 'url': 'https://screen.yahoo.com/community/community-sizzle-reel-203225340.html?format=embed', 'md5': '7993e572fac98e044588d0b5260f4352', 'info_dict': { 'id': '4fe78544-8d48-39d8-97cd-13f205d9fcdb', 'ext': 'mp4', 'title': "Yahoo Saves 'Community'", 'description': 'md5:4d4145af2fd3de00cbb6c1d664105053', 'duration': 170, } }, { 'url': 'https://tw.news.yahoo.com/%E6%95%A2%E5%95%8F%E5%B8%82%E9%95%B7%20%E9%BB%83%E7%A7%80%E9%9C%9C%E6%89%B9%E8%B3%B4%E6%B8%85%E5%BE%B7%20%E9%9D%9E%E5%B8%B8%E9%AB%98%E5%82%B2-034024051.html', 'md5': '45c024bad51e63e9b6f6fad7a43a8c23', 'info_dict': { 'id': 'cac903b3-fcf4-3c14-b632-643ab541712f', 'ext': 'mp4', 'title': '敢問市長/黃秀霜批賴清德「非常高傲」', 'description': '直言台南沒捷運 交通居五都之末', 'duration': 396, }, }, { 'url': 'https://uk.screen.yahoo.com/editor-picks/cute-raccoon-freed-drain-using-091756545.html', 'md5': '71298482f7c64cbb7fa064e4553ff1c1', 'info_dict': { 'id': 'b3affa53-2e14-3590-852b-0e0db6cd1a58', 'ext': 'webm', 'title': 'Cute Raccoon Freed From Drain\u00a0Using Angle Grinder', 'description': 'md5:f66c890e1490f4910a9953c941dee944', 'duration': 97, } }, { 'url': 'https://ca.sports.yahoo.com/video/program-makes-hockey-more-affordable-013127711.html', 'md5': '57e06440778b1828a6079d2f744212c4', 'info_dict': { 'id': 'c9fa2a36-0d4d-3937-b8f6-cc0fb1881e73', 'ext': 'mp4', 'title': 'Program that makes hockey more affordable not offered in Manitoba', 'description': 'md5:c54a609f4c078d92b74ffb9bf1f496f4', 'duration': 121, }, 'skip': 'Video gone', }, { 'url': 'https://ca.finance.yahoo.com/news/hackers-sony-more-trouble-well-154609075.html', 'info_dict': { 'id': '154609075', }, 'playlist': [{ 'md5': '000887d0dc609bc3a47c974151a40fb8', 'info_dict': { 'id': 'e624c4bc-3389-34de-9dfc-025f74943409', 'ext': 'mp4', 'title': '\'The Interview\' TV Spot: War', 'description': 'The Interview', 'duration': 30, }, }, { 'md5': '81bc74faf10750fe36e4542f9a184c66', 'info_dict': { 'id': '1fc8ada0-718e-3abe-a450-bf31f246d1a9', 'ext': 'mp4', 'title': '\'The Interview\' TV Spot: Guys', 'description': 'The Interview', 'duration': 30, }, }], }, { 'url': 
'http://news.yahoo.com/video/china-moses-crazy-blues-104538833.html', 'md5': '88e209b417f173d86186bef6e4d1f160', 'info_dict': { 'id': 'f885cf7f-43d4-3450-9fac-46ac30ece521', 'ext': 'mp4', 'title': 'China Moses Is Crazy About the Blues', 'description': 'md5:9900ab8cd5808175c7b3fe55b979bed0', 'duration': 128, } }, { 'url': 'https://in.lifestyle.yahoo.com/video/connect-dots-dark-side-virgo-090247395.html', 'md5': 'd9a083ccf1379127bf25699d67e4791b', 'info_dict': { 'id': '52aeeaa3-b3d1-30d8-9ef8-5d0cf05efb7c', 'ext': 'mp4', 'title': 'Connect the Dots: Dark Side of Virgo', 'description': 'md5:1428185051cfd1949807ad4ff6d3686a', 'duration': 201, }, 'skip': 'Domain name in.lifestyle.yahoo.com gone', }, { 'url': 'https://www.yahoo.com/movies/v/true-story-trailer-173000497.html', 'md5': '989396ae73d20c6f057746fb226aa215', 'info_dict': { 'id': '071c4013-ce30-3a93-a5b2-e0413cd4a9d1', 'ext': 'mp4', 'title': '\'True Story\' Trailer', 'description': 'True Story', 'duration': 150, }, }, { 'url': 'https://gma.yahoo.com/pizza-delivery-man-surprised-huge-tip-college-kids-195200785.html', 'only_matching': True, }, { 'note': 'NBC Sports embeds', 'url': 'http://sports.yahoo.com/blogs/ncaab-the-dagger/tyler-kalinoski-s-buzzer-beater-caps-davidson-s-comeback-win-185609842.html?guid=nbc_cbk_davidsonbuzzerbeater_150313', 'info_dict': { 'id': '9CsDKds0kvHI', 'ext': 'flv', 'description': 'md5:df390f70a9ba7c95ff1daace988f0d8d', 'title': 'Tyler Kalinoski hits buzzer-beater to lift Davidson', 'upload_date': '20150313', 'uploader': 'NBCU-SPORTS', 'timestamp': 1426270238, } }, { 'url': 'https://tw.news.yahoo.com/-100120367.html', 'only_matching': True, }, { # Query result is embedded in webpage, but explicit request to video API fails with geo restriction 'url': 'https://screen.yahoo.com/community/communitary-community-episode-1-ladders-154501237.html', 'md5': '4fbafb9c9b6f07aa8f870629f6671b35', 'info_dict': { 'id': '1f32853c-a271-3eef-8cb6-f6d6872cb504', 'ext': 'mp4', 'title': 'Communitary - Community Episode 1: Ladders', 'description': 'md5:8fc39608213295748e1e289807838c97', 'duration': 1646, }, }, { # it uses an alias to get the video_id 'url': 'https://www.yahoo.com/movies/the-stars-of-daddys-home-have-very-different-212843197.html', 'info_dict': { 'id': '40eda9c8-8e5f-3552-8745-830f67d0c737', 'ext': 'mp4', 'title': 'Will Ferrell & Mark Wahlberg Are Pro-Spanking', 'description': 'While they play feuding fathers in \'Daddy\'s Home,\' star Will Ferrell & Mark Wahlberg share their true feelings on parenthood.', }, }, { # config['models']['applet_model']['data']['sapi'] has no query 'url': 'https://www.yahoo.com/music/livenation/event/galactic-2016', 'md5': 'dac0c72d502bc5facda80c9e6d5c98db', 'info_dict': { 'id': 'a6015640-e9e5-3efb-bb60-05589a183919', 'ext': 'mp4', 'description': 'Galactic', 'title': 'Dolla Diva (feat. 
Maggie Koerner)', }, 'skip': 'redirect to https://www.yahoo.com/music', }, { # yahoo://article/ 'url': 'https://www.yahoo.com/movies/video/true-story-trailer-173000497.html', 'info_dict': { 'id': '071c4013-ce30-3a93-a5b2-e0413cd4a9d1', 'ext': 'mp4', 'title': "'True Story' Trailer", 'description': 'True Story', }, 'params': { 'skip_download': True, }, }, { # ytwnews://cavideo/ 'url': 'https://tw.video.yahoo.com/movie-tw/單車天使-中文版預-092316541.html', 'info_dict': { 'id': 'ba133ff2-0793-3510-b636-59dfe9ff6cff', 'ext': 'mp4', 'title': '單車天使 - 中文版預', 'description': '中文版預', }, 'params': { 'skip_download': True, }, }, { # custom brightcove 'url': 'https://au.tv.yahoo.com/plus7/sunrise/-/watch/37083565/clown-entertainers-say-it-is-hurting-their-business/', 'info_dict': { 'id': '5575377707001', 'ext': 'mp4', 'title': "Clown entertainers say 'It' is hurting their business", 'description': 'Stephen King s horror film has much to answer for. Jelby and Mr Loopy the Clowns join us.', 'timestamp': 1505341164, 'upload_date': '20170913', 'uploader_id': '2376984109001', }, 'params': { 'skip_download': True, }, }, { # custom brightcove, geo-restricted to Australia, bypassable 'url': 'https://au.tv.yahoo.com/plus7/sunrise/-/watch/37263964/sunrise-episode-wed-27-sep/', 'only_matching': True, } ] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) page_id = mobj.group('id') display_id = mobj.group('display_id') or page_id host = mobj.group('host') webpage, urlh = self._download_webpage_handle(url, display_id) if 'err=404' in urlh.geturl(): raise ExtractorError('Video gone', expected=True) # Look for iframed media first entries = [] iframe_urls = re.findall(r'<iframe[^>]+src="(/video/.+?-\d+\.html\?format=embed.*?)"', webpage) for idx, iframe_url in enumerate(iframe_urls): entries.append(self.url_result(host + iframe_url, 'Yahoo')) if entries: return self.playlist_result(entries, page_id) # Look for NBCSports iframes nbc_sports_url = NBCSportsVPlayerIE._extract_url(webpage) if nbc_sports_url: return self.url_result(nbc_sports_url, NBCSportsVPlayerIE.ie_key()) # Look for Brightcove Legacy Studio embeds bc_url = BrightcoveLegacyIE._extract_brightcove_url(webpage) if bc_url: return self.url_result(bc_url, BrightcoveLegacyIE.ie_key()) def brightcove_url_result(bc_url): return self.url_result( smuggle_url(bc_url, {'geo_countries': [mobj.group('country')]}), BrightcoveNewIE.ie_key()) # Look for Brightcove New Studio embeds bc_url = BrightcoveNewIE._extract_url(self, webpage) if bc_url: return brightcove_url_result(bc_url) brightcove_iframe = self._search_regex( r'(<iframe[^>]+data-video-id=["\']\d+[^>]+>)', webpage, 'brightcove iframe', default=None) if brightcove_iframe: attr = extract_attributes(brightcove_iframe) src = attr.get('src') if src: parsed_src = compat_urlparse.urlparse(src) qs = compat_urlparse.parse_qs(parsed_src.query) account_id = qs.get('accountId', ['2376984109001'])[0] brightcove_id = attr.get('data-video-id') or qs.get('videoId', [None])[0] if account_id and brightcove_id:<|fim▁hole|> 'http://players.brightcove.net/%s/default_default/index.html?videoId=%s' % (account_id, brightcove_id)) # Query result is often embedded in webpage as JSON. Sometimes explicit requests # to video API results in a failure with geo restriction reason therefore using # embedded query result when present sounds reasonable. 
config_json = self._search_regex( r'window\.Af\.bootstrap\[[^\]]+\]\s*=\s*({.*?"applet_type"\s*:\s*"td-applet-videoplayer".*?});(?:</script>|$)', webpage, 'videoplayer applet', default=None) if config_json: config = self._parse_json(config_json, display_id, fatal=False) if config: sapi = config.get('models', {}).get('applet_model', {}).get('data', {}).get('sapi') if sapi and 'query' in sapi: info = self._extract_info(display_id, sapi, webpage) self._sort_formats(info['formats']) return info items_json = self._search_regex( r'mediaItems: ({.*?})$', webpage, 'items', flags=re.MULTILINE, default=None) if items_json is None: alias = self._search_regex( r'"aliases":{"video":"(.*?)"', webpage, 'alias', default=None) if alias is not None: alias_info = self._download_json( 'https://www.yahoo.com/_td/api/resource/VideoService.videos;video_aliases=["%s"]' % alias, display_id, 'Downloading alias info') video_id = alias_info[0]['id'] else: CONTENT_ID_REGEXES = [ r'YUI\.namespace\("Media"\)\.CONTENT_ID\s*=\s*"([^"]+)"', r'root\.App\.Cache\.context\.videoCache\.curVideo = \{"([^"]+)"', r'"first_videoid"\s*:\s*"([^"]+)"', r'%s[^}]*"ccm_id"\s*:\s*"([^"]+)"' % re.escape(page_id), r'<article[^>]data-uuid=["\']([^"\']+)', r'<meta[^<>]+yahoo://article/view\?.*\buuid=([^&"\']+)', r'<meta[^<>]+["\']ytwnews://cavideo/(?:[^/]+/)+([\da-fA-F-]+)[&"\']', ] video_id = self._search_regex( CONTENT_ID_REGEXES, webpage, 'content ID') else: items = json.loads(items_json) info = items['mediaItems']['query']['results']['mediaObj'][0] # The 'meta' field is not always in the video webpage, we request it # from another page video_id = info['id'] return self._get_info(video_id, display_id, webpage) def _extract_info(self, display_id, query, webpage): info = query['query']['results']['mediaObj'][0] meta = info.get('meta') video_id = info.get('id') if not meta: msg = info['status'].get('msg') if msg: raise ExtractorError( '%s returned error: %s' % (self.IE_NAME, msg), expected=True) raise ExtractorError('Unable to extract media object meta') formats = [] for s in info['streams']: tbr = int_or_none(s.get('bitrate')) format_info = { 'width': int_or_none(s.get('width')), 'height': int_or_none(s.get('height')), 'tbr': tbr, } host = s['host'] path = s['path'] if host.startswith('rtmp'): fmt = 'rtmp' format_info.update({ 'url': host, 'play_path': path, 'ext': 'flv', }) else: if s.get('format') == 'm3u8_playlist': fmt = 'hls' format_info.update({ 'protocol': 'm3u8_native', 'ext': 'mp4', }) else: fmt = format_info['ext'] = determine_ext(path) format_url = compat_urlparse.urljoin(host, path) format_info['url'] = format_url format_info['format_id'] = fmt + ('-%d' % tbr if tbr else '') formats.append(format_info) closed_captions = self._html_search_regex( r'"closedcaptions":(\[[^\]]+\])', webpage, 'closed captions', default='[]') cc_json = self._parse_json(closed_captions, video_id, fatal=False) subtitles = {} if cc_json: for closed_caption in cc_json: lang = closed_caption['lang'] if lang not in subtitles: subtitles[lang] = [] subtitles[lang].append({ 'url': closed_caption['url'], 'ext': mimetype2ext(closed_caption['content_type']), }) return { 'id': video_id, 'display_id': display_id, 'title': unescapeHTML(meta['title']), 'formats': formats, 'description': clean_html(meta['description']), 'thumbnail': meta['thumbnail'] if meta.get('thumbnail') else self._og_search_thumbnail(webpage), 'duration': int_or_none(meta.get('duration')), 'subtitles': subtitles, } def _get_info(self, video_id, display_id, webpage): region = self._search_regex( 
r'\\?"region\\?"\s*:\s*\\?"([^"]+?)\\?"', webpage, 'region', fatal=False, default='US').upper() formats = [] info = {} for fmt in ('webm', 'mp4'): query_result = self._download_json( 'https://video.media.yql.yahoo.com/v1/video/sapi/streams/' + video_id, display_id, 'Downloading %s video info' % fmt, query={ 'protocol': 'http', 'region': region, 'format': fmt, }) info = self._extract_info(display_id, query_result, webpage) formats.extend(info['formats']) formats.extend(self._extract_m3u8_formats( 'http://video.media.yql.yahoo.com/v1/hls/%s?region=%s' % (video_id, region), video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False)) self._sort_formats(formats) info['formats'] = formats return info class YahooSearchIE(SearchInfoExtractor): IE_DESC = 'Yahoo screen search' _MAX_RESULTS = 1000 IE_NAME = 'screen.yahoo:search' _SEARCH_KEY = 'yvsearch' def _get_n_results(self, query, n): """Get a specified number of results for a query""" entries = [] for pagenum in itertools.count(0): result_url = 'http://video.search.yahoo.com/search/?p=%s&fr=screen&o=js&gs=0&b=%d' % (compat_urllib_parse.quote_plus(query), pagenum * 30) info = self._download_json(result_url, query, note='Downloading results page ' + str(pagenum + 1)) m = info['m'] results = info['results'] for (i, r) in enumerate(results): if (pagenum * 30) + i >= n: break mobj = re.search(r'(?P<url>screen\.yahoo\.com/.*?-\d*?\.html)"', r) e = self.url_result('http://' + mobj.group('url'), 'Yahoo') entries.append(e) if (pagenum * 30 + i >= n) or (m['last'] >= (m['total'] - 1)): break return { '_type': 'playlist', 'id': query, 'entries': entries, } class YahooGyaOPlayerIE(InfoExtractor): IE_NAME = 'yahoo:gyao:player' _VALID_URL = r'https?://(?:gyao\.yahoo\.co\.jp/(?:player|episode/[^/]+)|streaming\.yahoo\.co\.jp/c/y)/(?P<id>\d+/v\d+/v\d+|[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})' _TESTS = [{ 'url': 'https://gyao.yahoo.co.jp/player/00998/v00818/v0000000000000008564/', 'info_dict': { 'id': '5993125228001', 'ext': 'mp4', 'title': 'フューリー 【字幕版】', 'description': 'md5:21e691c798a15330eda4db17a8fe45a5', 'uploader_id': '4235717419001', 'upload_date': '20190124', 'timestamp': 1548294365, }, 'params': { # m3u8 download 'skip_download': True, }, }, { 'url': 'https://streaming.yahoo.co.jp/c/y/01034/v00133/v0000000000000000706/', 'only_matching': True, }, { 'url': 'https://gyao.yahoo.co.jp/episode/%E3%81%8D%E3%81%AE%E3%81%86%E4%BD%95%E9%A3%9F%E3%81%B9%E3%81%9F%EF%BC%9F%20%E7%AC%AC2%E8%A9%B1%202019%2F4%2F12%E6%94%BE%E9%80%81%E5%88%86/5cb02352-b725-409e-9f8d-88f947a9f682', 'only_matching': True, }] def _real_extract(self, url): video_id = self._match_id(url).replace('/', ':') video = self._download_json( 'https://gyao.yahoo.co.jp/dam/v1/videos/' + video_id, video_id, query={ 'fields': 'longDescription,title,videoId', }) return { '_type': 'url_transparent', 'id': video_id, 'title': video['title'], 'url': smuggle_url( 'http://players.brightcove.net/4235717419001/default_default/index.html?videoId=' + video['videoId'], {'geo_countries': ['JP']}), 'description': video.get('longDescription'), 'ie_key': BrightcoveNewIE.ie_key(), } class YahooGyaOIE(InfoExtractor): IE_NAME = 'yahoo:gyao' _VALID_URL = r'https?://(?:gyao\.yahoo\.co\.jp/p|streaming\.yahoo\.co\.jp/p/y)/(?P<id>\d+/v\d+)' _TESTS = [{ 'url': 'https://gyao.yahoo.co.jp/p/00449/v03102/', 'info_dict': { 'id': '00449:v03102', }, 'playlist_count': 2, }, { 'url': 'https://streaming.yahoo.co.jp/p/y/01034/v00133/', 'only_matching': True, }] def _real_extract(self, url): program_id = 
self._match_id(url).replace('/', ':') videos = self._download_json( 'https://gyao.yahoo.co.jp/api/programs/%s/videos' % program_id, program_id)['videos'] entries = [] for video in videos: video_id = video.get('id') if not video_id: continue entries.append(self.url_result( 'https://gyao.yahoo.co.jp/player/%s/' % video_id.replace(':', '/'), YahooGyaOPlayerIE.ie_key(), video_id)) return self.playlist_result(entries, program_id)<|fim▁end|>
return brightcove_url_result(
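A short sketch of how the YahooIE._VALID_URL pattern above resolves its display_id and id groups; the example URL is taken from the extractor's own _TESTS list, and the standalone re.match usage outside youtube-dl is an assumption for illustration only.

import re

_VALID_URL = (r'(?P<host>https?://(?:(?P<country>[a-zA-Z]{2})\.)?[\da-zA-Z_-]+\.yahoo\.com)'
              r'/(?:[^/]+/)*(?:(?P<display_id>.+)?-)?(?P<id>[0-9]+)(?:-[a-z]+)?(?:\.html)?')

m = re.match(_VALID_URL, 'https://screen.yahoo.com/community/community-sizzle-reel-203225340.html?format=embed')
assert m.group('display_id') == 'community-sizzle-reel'
assert m.group('id') == '203225340'     # used as page_id in _real_extract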
<|file_name|>header.hpp<|end_file_name|><|fim▁begin|>#ifndef SELECT_ALIEN_HPP #define SELECT_ALIEN_HPP #include "animation.hpp" #include "game_object.hpp" #include "text.hpp" using namespace engine; class Header: public GameObject{ public: Header(double positionX, double positionY, int maxPapers, int stageNumber); ~Header(); void update(double timeElapsed); void draw(); void init(); void setAlienSelect(int select); Animation * getAnimation();<|fim▁hole|> Text* paper_text; Text* stage_text; int total_papers; //declareting variables total papers int alien_select; // declareting variables alien selection Text* convertToText(int newValue); void verifySelect(); }; #endif<|fim▁end|>
void updatePaperQuantity(int newValue);

	private:
		Animation* animator;
		Animation* paper_icon;
		Text* paper_text;
		Text* stage_text;
		int total_papers;   // total number of papers collected
		int alien_select;   // currently selected alien
		Text* convertToText(int newValue);
		void verifySelect();
};

#endif<|fim▁end|>
<|file_name|>data.js<|end_file_name|><|fim▁begin|>'use strict'; const TYPE = Symbol.for('type');<|fim▁hole|> class Data { constructor(options) { // File details this.filepath = options.filepath; // Type this[TYPE] = 'data'; // Data Object.assign(this, options.data); } } module.exports = Data;<|fim▁end|>
<|file_name|>workbenchTestServices.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { workbenchInstantiationService as browserWorkbenchInstantiationService, ITestInstantiationService, TestLifecycleService, TestFilesConfigurationService, TestFileService, TestFileDialogService, TestPathService } from 'vs/workbench/test/browser/workbenchTestServices'; import { Event } from 'vs/base/common/event'; import { ISharedProcessService } from 'vs/platform/ipc/electron-browser/sharedProcessService'; import { NativeWorkbenchEnvironmentService, INativeWorkbenchEnvironmentService } from 'vs/workbench/services/environment/electron-browser/environmentService'; import { NativeTextFileService, EncodingOracle, IEncodingOverride } from 'vs/workbench/services/textfile/electron-browser/nativeTextFileService'; import { IElectronService } from 'vs/platform/electron/node/electron'; import { INativeOpenDialogOptions } from 'vs/platform/dialogs/node/dialogs'; import { FileOperationError, IFileService } from 'vs/platform/files/common/files'; import { IUntitledTextEditorService } from 'vs/workbench/services/untitled/common/untitledTextEditorService'; import { ILifecycleService } from 'vs/platform/lifecycle/common/lifecycle'; import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation'; import { IModelService } from 'vs/editor/common/services/modelService'; import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService'; import { IDialogService, IFileDialogService } from 'vs/platform/dialogs/common/dialogs'; import { ITextResourceConfigurationService } from 'vs/editor/common/services/textResourceConfigurationService'; import { IProductService } from 'vs/platform/product/common/productService'; import { IFilesConfigurationService } from 'vs/workbench/services/filesConfiguration/common/filesConfigurationService'; import { ITextModelService } from 'vs/editor/common/services/resolverService'; import { ICodeEditorService } from 'vs/editor/browser/services/codeEditorService'; import { URI } from 'vs/base/common/uri'; import { IReadTextFileOptions, ITextFileStreamContent, ITextFileService } from 'vs/workbench/services/textfile/common/textfiles'; import { createTextBufferFactoryFromStream } from 'vs/editor/common/model/textModel'; import { IOpenEmptyWindowOptions, IWindowOpenable, IOpenWindowOptions } from 'vs/platform/windows/common/windows'; import { parseArgs, OPTIONS } from 'vs/platform/environment/node/argv'; import { LogLevel, ILogService } from 'vs/platform/log/common/log'; import { IPathService } from 'vs/workbench/services/path/common/pathService'; import { IWorkingCopyFileService } from 'vs/workbench/services/workingCopy/common/workingCopyFileService'; import { UTF16le, UTF16be, UTF8_with_bom } from 'vs/base/node/encoding'; import { IWorkspaceContextService } from 'vs/platform/workspace/common/workspace'; import { ModelServiceImpl } from 'vs/editor/common/services/modelServiceImpl'; import { IBackupFileService } from 'vs/workbench/services/backup/common/backup'; import { NodeTestBackupFileService } from 'vs/workbench/services/backup/test/electron-browser/backupFileService.test'; import { IWorkingCopyService } from 
'vs/workbench/services/workingCopy/common/workingCopyService'; import { IEditorService } from 'vs/workbench/services/editor/common/editorService'; import { INativeWindowConfiguration, IOpenedWindow } from 'vs/platform/windows/node/window'; import { TestContextService } from 'vs/workbench/test/common/workbenchTestServices'; export const TestWindowConfiguration: INativeWindowConfiguration = { windowId: 0, machineId: 'testMachineId', sessionId: 'testSessionId', logLevel: LogLevel.Error, mainPid: 0, partsSplashPath: '', appRoot: '', userEnv: {}, execPath: process.execPath, perfEntries: [], ...parseArgs(process.argv, OPTIONS) }; export const TestEnvironmentService = new NativeWorkbenchEnvironmentService(TestWindowConfiguration, process.execPath); export class TestTextFileService extends NativeTextFileService { private resolveTextContentError!: FileOperationError | null; constructor( @IFileService protected fileService: IFileService, @IUntitledTextEditorService untitledTextEditorService: IUntitledTextEditorService, @ILifecycleService lifecycleService: ILifecycleService, @IInstantiationService instantiationService: IInstantiationService, @IModelService modelService: IModelService, @IWorkbenchEnvironmentService environmentService: INativeWorkbenchEnvironmentService, @IDialogService dialogService: IDialogService, @IFileDialogService fileDialogService: IFileDialogService, @ITextResourceConfigurationService textResourceConfigurationService: ITextResourceConfigurationService, @IProductService productService: IProductService, @IFilesConfigurationService filesConfigurationService: IFilesConfigurationService, @ITextModelService textModelService: ITextModelService, @ICodeEditorService codeEditorService: ICodeEditorService, @IPathService athService: IPathService, @IWorkingCopyFileService workingCopyFileService: IWorkingCopyFileService, @ILogService logService: ILogService ) { super( fileService, untitledTextEditorService, lifecycleService, instantiationService, modelService, environmentService, dialogService, fileDialogService, textResourceConfigurationService, productService, filesConfigurationService, textModelService, codeEditorService, athService, workingCopyFileService, logService ); } setResolveTextContentErrorOnce(error: FileOperationError): void { this.resolveTextContentError = error; } async readStream(resource: URI, options?: IReadTextFileOptions): Promise<ITextFileStreamContent> { if (this.resolveTextContentError) { const error = this.resolveTextContentError; this.resolveTextContentError = null; throw error; } const content = await this.fileService.readFileStream(resource, options); return { resource: content.resource, name: content.name, mtime: content.mtime, ctime: content.ctime, etag: content.etag, encoding: 'utf8', value: await createTextBufferFactoryFromStream(content.value), size: 10 }; } } export class TestNativeTextFileServiceWithEncodingOverrides extends NativeTextFileService { private _testEncoding: TestEncodingOracle | undefined; get encoding(): TestEncodingOracle { if (!this._testEncoding) { this._testEncoding = this._register(this.instantiationService.createInstance(TestEncodingOracle)); } return this._testEncoding; } } class TestEncodingOracle extends EncodingOracle { protected get encodingOverrides(): IEncodingOverride[] { return [ { extension: 'utf16le', encoding: UTF16le }, { extension: 'utf16be', encoding: UTF16be }, { extension: 'utf8bom', encoding: UTF8_with_bom } ]; } protected set encodingOverrides(overrides: IEncodingOverride[]) { } } export class TestSharedProcessService 
implements ISharedProcessService { _serviceBrand: undefined; getChannel(channelName: string): any { return undefined; } registerChannel(channelName: string, channel: any): void { } async toggleSharedProcessWindow(): Promise<void> { } async whenSharedProcessReady(): Promise<void> { } } export class TestElectronService implements IElectronService { _serviceBrand: undefined; onWindowOpen: Event<number> = Event.None; onWindowMaximize: Event<number> = Event.None; onWindowUnmaximize: Event<number> = Event.None; onWindowFocus: Event<number> = Event.None; onWindowBlur: Event<number> = Event.None; windowCount = Promise.resolve(1); getWindowCount(): Promise<number> { return this.windowCount; } async getWindows(): Promise<IOpenedWindow[]> { return []; } async getActiveWindowId(): Promise<number | undefined> { return undefined; } openWindow(options?: IOpenEmptyWindowOptions): Promise<void>; openWindow(toOpen: IWindowOpenable[], options?: IOpenWindowOptions): Promise<void>; openWindow(arg1?: IOpenEmptyWindowOptions | IWindowOpenable[], arg2?: IOpenWindowOptions): Promise<void> { throw new Error('Method not implemented.'); } async toggleFullScreen(): Promise<void> { } async handleTitleDoubleClick(): Promise<void> { } async isMaximized(): Promise<boolean> { return true; } async maximizeWindow(): Promise<void> { } async unmaximizeWindow(): Promise<void> { } async minimizeWindow(): Promise<void> { } async focusWindow(options?: { windowId?: number | undefined; } | undefined): Promise<void> { } async showMessageBox(options: Electron.MessageBoxOptions): Promise<Electron.MessageBoxReturnValue> { throw new Error('Method not implemented.'); } async showSaveDialog(options: Electron.SaveDialogOptions): Promise<Electron.SaveDialogReturnValue> { throw new Error('Method not implemented.'); } async showOpenDialog(options: Electron.OpenDialogOptions): Promise<Electron.OpenDialogReturnValue> { throw new Error('Method not implemented.'); } async pickFileFolderAndOpen(options: INativeOpenDialogOptions): Promise<void> { } async pickFileAndOpen(options: INativeOpenDialogOptions): Promise<void> { } async pickFolderAndOpen(options: INativeOpenDialogOptions): Promise<void> { } async pickWorkspaceAndOpen(options: INativeOpenDialogOptions): Promise<void> { } async showItemInFolder(path: string): Promise<void> { } async setRepresentedFilename(path: string): Promise<void> { } async setDocumentEdited(edited: boolean): Promise<void> { } async openExternal(url: string): Promise<boolean> { return false; } async updateTouchBar(): Promise<void> { } async newWindowTab(): Promise<void> { } async showPreviousWindowTab(): Promise<void> { } async showNextWindowTab(): Promise<void> { } async moveWindowTabToNewWindow(): Promise<void> { } async mergeAllWindowTabs(): Promise<void> { } async toggleWindowTabsBar(): Promise<void> { } async relaunch(options?: { addArgs?: string[] | undefined; removeArgs?: string[] | undefined; } | undefined): Promise<void> { } async reload(): Promise<void> { } async closeWindow(): Promise<void> { } async closeWindowById(): Promise<void> { } async quit(): Promise<void> { } async openDevTools(options?: Electron.OpenDevToolsOptions | undefined): Promise<void> { } async toggleDevTools(): Promise<void> { } async startCrashReporter(options: Electron.CrashReporterStartOptions): Promise<void> { } async resolveProxy(url: string): Promise<string | undefined> { return undefined; } } export function workbenchInstantiationService(): ITestInstantiationService { const instantiationService = browserWorkbenchInstantiationService({ 
textFileService: insta => <ITextFileService>insta.createInstance(TestTextFileService), pathService: insta => <IPathService>insta.createInstance(TestNativePathService) }); instantiationService.stub(IElectronService, new TestElectronService()); return instantiationService; } export class TestServiceAccessor { constructor( @ILifecycleService public lifecycleService: TestLifecycleService, @ITextFileService public textFileService: TestTextFileService, @IFilesConfigurationService public filesConfigurationService: TestFilesConfigurationService, @IWorkspaceContextService public contextService: TestContextService, @IModelService public modelService: ModelServiceImpl, @IFileService public fileService: TestFileService, @IElectronService public electronService: TestElectronService, @IFileDialogService public fileDialogService: TestFileDialogService,<|fim▁hole|> @IWorkingCopyService public workingCopyService: IWorkingCopyService, @IEditorService public editorService: IEditorService ) { } } export class TestNativePathService extends TestPathService { _serviceBrand: undefined; constructor(@IWorkbenchEnvironmentService environmentService: INativeWorkbenchEnvironmentService) { super(environmentService.userHome); } }<|fim▁end|>
@IBackupFileService public backupFileService: NodeTestBackupFileService,
<|file_name|>MongodbManagerTest.java<|end_file_name|><|fim▁begin|>//package org.grain.mongo; // //import static org.junit.Assert.assertEquals; // //import java.util.ArrayList; //import java.util.List; //import java.util.UUID; // //import org.bson.conversions.Bson; //import org.junit.BeforeClass; //import org.junit.Test; // //import com.mongodb.client.model.Filters; // //public class MongodbManagerTest { // // @BeforeClass // public static void setUpBeforeClass() throws Exception { // MongodbManager.init("172.27.108.73", 27017, "test", "test", "test", null); // boolean result = MongodbManager.createCollection("test_table"); // if (!result) { // System.out.println("创建test_table失败"); // } // TestMongo testMongo = new TestMongo("111", "name"); // result = MongodbManager.insertOne("test_table", testMongo); // if (!result) { // System.out.println("插入TestMongo失败"); // } // } // // @Test // public void testCreateCollection() { // boolean result = MongodbManager.createCollection("test_table1"); // assertEquals(true, result); // } // // @Test // public void testInsertOne() { // TestMongo testMongo = new TestMongo(UUID.randomUUID().toString(), "name"); // boolean result = MongodbManager.insertOne("test_table", testMongo); // assertEquals(true, result); // } // // @Test // public void testInsertMany() { // TestMongo testMongo = new TestMongo(UUID.randomUUID().toString(), "name"); // TestMongo testMongo1 = new TestMongo(UUID.randomUUID().toString(), "name1"); // List<MongoObj> list = new ArrayList<>(); // list.add(testMongo); // list.add(testMongo1); // boolean result = MongodbManager.insertMany("test_table", list); // assertEquals(true, result); // } // // @Test // public void testFind() { // List<TestMongo> list = MongodbManager.find("test_table", null, TestMongo.class, 0, 0); // assertEquals(true, list.size() > 0); // } // // @Test // public void testDeleteById() { // TestMongo testMongo = new TestMongo("222", "name"); // boolean result = MongodbManager.insertOne("test_table", testMongo); // Bson filter = Filters.and(Filters.eq("id", "222")); // List<TestMongo> list = MongodbManager.find("test_table", filter, TestMongo.class, 0, 0); // testMongo = list.get(0); // result = MongodbManager.deleteById("test_table", testMongo); // assertEquals(true, result); // } // // @Test // public void testUpdateById() { // TestMongo testMongo = new TestMongo("333", "name"); // boolean result = MongodbManager.insertOne("test_table", testMongo); // Bson filter = Filters.and(Filters.eq("id", "333")); // List<TestMongo> list = MongodbManager.find("test_table", filter, TestMongo.class, 0, 0); // testMongo = list.get(0);<|fim▁hole|>// assertEquals(true, result); // } // // @Test // public void testCount() { // long count = MongodbManager.count("test_table", null); // assertEquals(true, count > 0); // } // //}<|fim▁end|>
// testMongo.setName("name" + UUID.randomUUID().toString()); // result = MongodbManager.updateById("test_table", testMongo);
<|file_name|>res_company.py<|end_file_name|><|fim▁begin|>from odoo import models, fields, api, _ # import odoo.tools as tools try: from pyafipws.iibb import IIBB except ImportError: IIBB = None # from pyafipws.padron import PadronAFIP from odoo.exceptions import UserError import logging import json import requests # from dateutil.relativedelta import relativedelta _logger = logging.getLogger(__name__) class ResCompany(models.Model): _inherit = "res.company" regimenes_ganancias_ids = fields.Many2many( 'afip.tabla_ganancias.alicuotasymontos', 'res_company_tabla_ganancias_rel', 'company_id', 'regimen_id', 'Regimenes Ganancia', ) agip_padron_type = fields.Selection([ ('regimenes_generales', 'Regímenes Generales')], string='Padron AGIP', default='regimenes_generales', ) agip_alicuota_no_sincripto_retencion = fields.Float( 'Agip: Alicuota no inscripto retención', ) agip_alicuota_no_sincripto_percepcion = fields.Float( 'Agip: Alicuota no inscripto percepción', ) arba_alicuota_no_sincripto_retencion = fields.Float( 'Arba: Alicuota no inscripto retención', ) arba_alicuota_no_sincripto_percepcion = fields.Float( 'Arba: Alicuota no inscripto percepción', ) cdba_alicuota_no_sincripto_retencion = fields.Float( 'Rentas Córdoba: Alícuota no inscripto retención' ) cdba_alicuota_no_sincripto_percepcion = fields.Float( 'Rentas Córdoba: Alícuota no inscripto percepción' ) def _localization_use_withholdings(self): """ Argentinian localization use documents """ self.ensure_one() return True if self.country_id == self.env.ref('base.ar') else super()._localization_use_withholdings() @api.model def _get_arba_environment_type(self): """ Function to define homologation/production environment First it search for a paramter "arba.ws.env.type" if exists and: * is production --> production * is homologation --> homologation Else Search for 'server_mode' parameter on conf file. 
If that parameter is: * 'test' or 'develop' --> homologation * other or no parameter --> production """ # como no se dispone de claves de homologacion usamos produccion # siempre environment_type = 'production' # parameter_env_type = self.env[ # 'ir.config_parameter'].sudo().get_param('arba.ws.env.type') # if parameter_env_type == 'production': # environment_type = 'production' # elif parameter_env_type == 'homologation': # environment_type = 'homologation' # else: # server_mode = tools.config.get('server_mode') # if not server_mode or server_mode == 'production': # environment_type = 'production' # else: # environment_type = 'homologation' # _logger.info( # 'Running arba WS on %s mode' % environment_type) return environment_type @api.model def get_arba_login_url(self, environment_type): if environment_type == 'production': arba_login_url = ( 'https://dfe.arba.gov.ar/DomicilioElectronico/' 'SeguridadCliente/dfeServicioConsulta.do') else: arba_login_url = ( 'https://dfe.test.arba.gov.ar/DomicilioElectronico' '/SeguridadCliente/dfeServicioConsulta.do') return arba_login_url def arba_connect(self): """ Method to be called """ self.ensure_one() cuit = self.partner_id.cuit_required() if not self.arba_cit: raise UserError(_( 'You must configure ARBA CIT on company %s') % ( self.name)) ws = IIBB() environment_type = self._get_arba_environment_type() _logger.info( 'Getting connection to ARBA on %s mode' % environment_type) # argumentos de conectar: self, url=None, proxy="", # wrapper=None, cacert=None, trace=False, testing="" arba_url = self.get_arba_login_url(environment_type) ws.Usuario = cuit ws.Password = self.arba_cit ws.Conectar(url=arba_url) _logger.info( 'Connection getted to ARBA with url "%s" and CUIT %s' % ( arba_url, cuit)) return ws def get_agip_data(self, partner, date): raise UserError(_( 'Falta configuración de credenciales de ADHOC para consulta de ' 'Alícuotas de AGIP')) def get_arba_data(self, partner, from_date, to_date): self.ensure_one() # from_date = date + relativedelta(day=1).strftime('%Y%m%d') # to_date = date + relativedelta( # day=1, days=-1, months=+1).strftime('%Y%m%d') cuit = partner.cuit_required() _logger.info( 'Getting ARBA data for cuit %s from date %s to date %s' % ( from_date, to_date, cuit)) ws = self.arba_connect() ws.ConsultarContribuyentes( from_date, to_date, cuit) <|fim▁hole|> # ' Hubo error general de ARBA? 
if ws.CodigoError: if ws.CodigoError == '11': # we still create the record so we don need to check it again # on same period _logger.info('CUIT %s not present on padron ARBA' % cuit) else: raise UserError("%s\nError %s: %s" % ( ws.MensajeError, ws.TipoError, ws.CodigoError)) # no ponemos esto, si no viene alicuota es porque es cero entonces # if not ws.AlicuotaRetencion or not ws.AlicuotaPercepcion: # raise UserError('No pudimos obtener la AlicuotaRetencion') # ' Datos generales de la respuesta:' data = { 'numero_comprobante': ws.NumeroComprobante, 'codigo_hash': ws.CodigoHash, # 'CuitContribuyente': ws.CuitContribuyente, 'alicuota_percepcion': ws.AlicuotaPercepcion and float( ws.AlicuotaPercepcion.replace(',', '.')), 'alicuota_retencion': ws.AlicuotaRetencion and float( ws.AlicuotaRetencion.replace(',', '.')), 'grupo_percepcion': ws.GrupoPercepcion, 'grupo_retencion': ws.GrupoRetencion, 'from_date': from_date, 'to_date': to_date, } _logger.info('We get the following data: \n%s' % data) return data def get_cordoba_data(self, partner, date): """ Obtener alícuotas desde app.rentascordoba.gob.ar :param partner: El partner sobre el cual trabajamos :param date: La fecha del comprobante :param from_date: Fecha de inicio de validez de alícuota por defecto :param to_date: Fecha de fin de validez de alícuota por defecto Devuelve diccionario de datos """ _logger.info('Getting withholding data from rentascordoba.gob.ar') date_date = fields.Date.from_string(date) # Establecer parámetros de solicitud url = "https://app.rentascordoba.gob.ar/rentas/rest/svcGetAlicuotas" payload = {'body': partner.main_id_number} headers = {'content-type': 'application/json'} # Realizar solicitud r = requests.post(url, data=json.dumps(payload), headers=headers) json_body = r.json() if r.status_code != 200: raise UserError('Error al contactar rentascordoba.gob.ar. ' 'El servidor respondió: \n\n%s' % json_body) code = json_body.get("errorCod") # Capturar Códigos de Error. # 3 => No Inscripto, 2 => No pasible, 1 => CUIT incorrecta, 0 => OK if code == 3: alicuota_percepcion = self.cdba_alicuota_no_sincripto_percepcion alicuota_retencion = self.cdba_alicuota_no_sincripto_retencion elif code == 2: alicuota_percepcion = 0.0 alicuota_retencion = 0.0 elif code != 0: raise UserError(json_body.get("message")) else: dict_alic = json_body.get("sdtConsultaAlicuotas") alicuota_percepcion = float(dict_alic.get("CRD_ALICUOTA_PER")) alicuota_retencion = float(dict_alic.get("CRD_ALICUOTA_RET")) # Verificar que el comprobante tenga fecha dentro de la vigencia from_date_date = fields.Date.from_string(dict_alic.get("CRD_FECHA_INICIO")) to_date_date = fields.Date.from_string(dict_alic.get("CRD_FECHA_FIN")) if not (from_date_date <= date_date < to_date_date): raise UserError( 'No se puede obtener automáticamente la alicuota para la ' 'fecha %s. Por favor, ingrese la misma manualmente ' 'en el partner.' % date) data = { 'alicuota_percepcion': alicuota_percepcion, 'alicuota_retencion': alicuota_retencion, } _logger.info("We've got the following data: \n%s" % data) return data<|fim▁end|>
if ws.Excepcion: raise UserError("%s\nExcepcion: %s" % ( ws.Traceback, ws.Excepcion))
<|file_name|>async_waiter.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "mojo/public/cpp/environment/async_waiter.h" namespace mojo { AsyncWaiter::AsyncWaiter(Handle handle, MojoHandleSignals signals, const Callback& callback) : waiter_(Environment::GetDefaultAsyncWaiter()), id_(0), callback_(callback) { id_ = waiter_->AsyncWait(handle.value(), signals, MOJO_DEADLINE_INDEFINITE, &AsyncWaiter::WaitComplete, this); } AsyncWaiter::~AsyncWaiter() { if (id_) waiter_->CancelWait(id_); } // static void AsyncWaiter::WaitComplete(void* waiter, MojoResult result) { static_cast<AsyncWaiter*>(waiter)->WaitCompleteInternal(result); } void AsyncWaiter::WaitCompleteInternal(MojoResult result) {<|fim▁hole|>} } // namespace mojo<|fim▁end|>
id_ = 0; callback_.Run(result);
<|file_name|>scan.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # @Author: yancz1989 # @Date: 2017-01-17 23:43:18 # @Last Modified by: yancz1989<|fim▁hole|>import os import json import sys from PIL import Image, ImageDraw import SimpleITK as sitk from env import * def generate_scan_image(subset): list_dirs = os.walk(TRUNK_DIR + subset) jsobjs = [] output_dir = SAMPLE_DIR + subset mkdir(output_dir) for root, dirs, files in list_dirs: for f in files: if f.lower().endswith('mhd'): key = os.path.splitext(f)[0] numpyImage, numpyOrigin, numpySpacing = ( util.load_itk_image( os.path.join(root, f))) for z in range(numpyImage.shape[0]): patch = numpyImage[z, 0:512, 0:512] patch = util.normalizePlanes(patch) im = Image.fromarray(patch * 255).convert('L') output_filename = ( subset + "-" + key + "-" + str(z) + "-scan.bmp") print(subset + '/' + output_filename) im.save(os.path.join( output_dir, output_filename)) jsobjs.append({ "image_path": subset + '/' + output_filename, "rects":[] } ) with open(META_DIR + subset + '-scan.json', 'w') as f: json.dump(jsobjs, f) def get_image_map(data_root, input_file, threshold): result_map = {} with open(input_file) as f: result_list = json.load(f) for it in result_list: key, subset, z = parse_image_file(it['file']) src_file = os.path.join( data_root, subset, key + ".mhd") boxes = filterBoxes(it['box'], threshold) if not result_map.get(src_file): result_map[src_file] = [] result_map[src_file].append((key, z, boxes)) return result_map def generate_result(result_map, output_file): with open(output_file) as fout: fout.write("seriesuid,coordX,coordY,coordZ,probability\n") for fkey, val in result_map.items(): itkimage = sitk.ReadImage(fkey) for it in val: key, z, boxes = val for box in boxes: world_box = voxel_2_world( [z, box[1], box[0]], itkimage) csv_line = key + "," + str(world_box[2]) + "," + str(world_box[1]) + "," + str(world_box[0]) + "," + str(box[4]) fout.write(csv_line + "\n") if __name__ == '__main__': if sys.argv[1] == 'gen': generate_scan_image(sys.argv[2]) else: result_map = get_image_map(TRUNK_DIR, sys.argv[2], 0.01) generate_result(result_map, OUTPUT_FILE)<|fim▁end|>
# @Last Modified time: 2017-02-22 20:33:29 import utilities as util from utilities import parse_image_file, filterBoxes, voxel_2_world, mkdir import numpy as np
<|file_name|>text.rs<|end_file_name|><|fim▁begin|>use std::str; use crate::{ AttributeId, AttributeValue, Document, Node, }; trait StrTrim { fn remove_first(&mut self); fn remove_last(&mut self); } impl StrTrim for String { fn remove_first(&mut self) { self.drain(0..1); } fn remove_last(&mut self) { self.pop(); } } #[derive(Clone, Copy, PartialEq, Debug)] enum XmlSpace { Default, Preserve, } // Prepare text nodes according to the spec: https://www.w3.org/TR/SVG11/text.html#WhiteSpace // // This function handles: // - 'xml:space' processing // - tabs and newlines removing/replacing // - spaces trimming pub fn prepare_text(doc: &mut Document) { // Remember nodes that have 'xml:space' changed. let mut nodes = Vec::new(); _prepare_text(&doc.root(), &mut nodes, XmlSpace::Default); // Remove temporary 'xml:space' attributes created during the text processing. for mut node in nodes { node.remove_attribute(AttributeId::Space); } let root = doc.root().clone(); doc.drain(root, |n| n.is_text() && n.text().is_empty()); } fn _prepare_text(parent: &Node, nodes: &mut Vec<Node>, parent_xmlspace: XmlSpace) { for mut node in parent.children().filter(|n| n.is_element()) { let xmlspace = get_xmlspace(&mut node, nodes, parent_xmlspace); if let Some(child) = node.first_child() { if child.is_text() { prepare_text_children(&node, nodes, xmlspace); continue; } } _prepare_text(&node, nodes, xmlspace); } } fn get_xmlspace(node: &mut Node, nodes: &mut Vec<Node>, default: XmlSpace) -> XmlSpace { { let attrs = node.attributes(); let v = attrs.get_value(AttributeId::Space); if let Some(&AttributeValue::String(ref s)) = v { return if s == "preserve" { XmlSpace::Preserve } else { XmlSpace::Default }; } } // 'xml:space' is not set - set it manually. set_xmlspace(node, nodes, default); default } fn set_xmlspace(node: &mut Node, nodes: &mut Vec<Node>, xmlspace: XmlSpace) { let xmlspace_str = match xmlspace { XmlSpace::Default => "default", XmlSpace::Preserve => "preserve", }; node.set_attribute((AttributeId::Space, xmlspace_str)); nodes.push(node.clone()); } fn prepare_text_children(parent: &Node, marked_nodes: &mut Vec<Node>, xmlspace: XmlSpace) { // Trim all descendant text nodes. for mut child in parent.descendants() { if child.is_text() { let child_xmlspace = get_xmlspace(&mut child.parent().unwrap(), marked_nodes, xmlspace); let new_text = { let text = child.text(); trim(text.as_ref(), child_xmlspace) }; child.set_text(&new_text); } } let mut nodes = Vec::new(); collect_text(parent, 0, &mut nodes); // `trim` method has already collapsed all spaces into a single one, // so we have to check only for one leading or trailing space. if nodes.len() == 1 { // Process element with a single text node child. let mut node = nodes[0].0.clone(); if xmlspace == XmlSpace::Default { let mut text = node.text_mut(); match text.len() { 0 => {} // An empty string. Do nothing. 1 => { // If string has only one character and it's a space - clear this string. if text.as_bytes()[0] == b' ' { text.clear(); } } _ => { // 'text' has at least 2 bytes, so indexing is safe. let c1 = text.as_bytes()[0]; let c2 = text.as_bytes()[text.len() - 1]; if c1 == b' ' { text.remove_first(); } if c2 == b' ' { text.remove_last(); } } } } else { // Do nothing when xml:space=preserve. } } else if nodes.len() > 1 { // Process element with many text node children. // We manage all text nodes as a single text node // and trying to remove duplicated spaces across nodes. 
// // For example '<text>Text <tspan> text </tspan> text</text>' // is the same is '<text>Text <tspan>text</tspan> text</text>' let mut i = 0; let len = nodes.len() - 1; let mut last_non_empty: Option<Node> = None; while i < len { // Process pairs. let (mut node1, depth1) = nodes[i].clone(); let (mut node2, depth2) = nodes[i + 1].clone(); if node1.text().is_empty() { if let Some(ref n) = last_non_empty { node1 = n.clone(); } } // Parent of the text node is always an element node and always exist, // so unwrap is safe. let xmlspace1 = get_xmlspace(&mut node1.parent().unwrap(), marked_nodes, xmlspace); let xmlspace2 = get_xmlspace(&mut node2.parent().unwrap(), marked_nodes, xmlspace); // >text<..>text< // 1 2 3 4 let (c1, c2, c3, c4) = { let text1 = node1.text(); let text2 = node2.text(); let bytes1 = text1.as_bytes(); let bytes2 = text2.as_bytes();<|fim▁hole|> let c3 = bytes2.first().cloned(); let c4 = bytes2.last().cloned(); (c1, c2, c3, c4) }; // NOTE: xml:space processing is mostly an undefined behavior, // because everyone do this differently. // We mimic Chrome behavior. // Remove space from the second text node if both nodes has bound spaces. // From: '<text>Text <tspan> text</tspan></text>' // To: '<text>Text <tspan>text</tspan></text>' // // See text-tspan-02-b.svg for details. if c2 == Some(b' ') && c2 == c3 { if depth1 < depth2 { if xmlspace2 == XmlSpace::Default { node2.text_mut().remove_first(); } } else { if xmlspace1 == XmlSpace::Default && xmlspace2 == XmlSpace::Default { node1.text_mut().remove_last(); } else if xmlspace1 == XmlSpace::Preserve && xmlspace2 == XmlSpace::Default { node2.text_mut().remove_first(); } } } let is_first = i == 0; let is_last = i == len - 1; if is_first && c1 == Some(b' ') && xmlspace1 == XmlSpace::Default && !node1.text().is_empty() { // Remove leading space of the first text node. node1.text_mut().remove_first(); } else if is_last && c4 == Some(b' ') && !node2.text().is_empty() && xmlspace2 == XmlSpace::Default { // Remove trailing space of the last text node. // Also check that 'text2' is not empty already. node2.text_mut().remove_last(); } if is_last && c2 == Some(b' ') && !node1.text().is_empty() && node2.text().is_empty() && node1.text().ends_with(' ') { node1.text_mut().remove_last(); } if !node1.text().trim().is_empty() { last_non_empty = Some(node1.clone()); } i += 1; } } } fn collect_text(parent: &Node, depth: usize, nodes: &mut Vec<(Node, usize)>) { for child in parent.children() { if child.is_text() { nodes.push((child.clone(), depth)); } else if child.is_element() { collect_text(&child, depth + 1, nodes); } } } fn trim(text: &str, space: XmlSpace) -> String { let mut s = String::with_capacity(text.len()); let mut prev = '0'; for c in text.chars() { // \r, \n and \t should be converted into spaces. let c = match c { '\r' | '\n' | '\t' => ' ', _ => c, }; // Skip continuous spaces. if space == XmlSpace::Default && c == ' ' && c == prev { continue; } prev = c; s.push(c); } s }<|fim▁end|>
let c1 = bytes1.first().cloned(); let c2 = bytes1.last().cloned();
<|file_name|>mail.py<|end_file_name|><|fim▁begin|>import os import smtplib from email.mime.text import MIMEText from email.mime.image import MIMEImage from email.mime.multipart import MIMEMultipart <|fim▁hole|> emailCount = 0; def __init__(self, address, password): self.address = address self.password = password email.emailCount += 1 def initSMTP(self, emailserver, port): self.smtpconnection = smtplib.SMTP(emailserver, port) #returns an SMTP object self.smtpconnection.ehlo() #says "hello" to smtp server self.smtpconnection.starttls() #enable TLS encryption self.smtpconnection.login(self.address, self.password) def sendEmail(self, recipient, subject, message, imgPath): msg = MIMEMultipart() msg["Subject"] = subject msg["From"] = self.address msg["To"] = recipient msg.attach(MIMEText(message)) imgfp = open(imgPath, "rb") img = MIMEImage(imgfp.read()) imgfp.close() msg.attach(img) self.smtpconnection.sendmail(self.address, recipient, msg.as_string()) def closeSMTP(self): self.smtpconnection.close()<|fim▁end|>
class Email:
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import { useState, useEffect } from "react"; import { FixedSizeList as List } from "react-window"; import './index.scss'; import { ReactComponent as MenuIcon } from './menu.svg'; import { ReactComponent as DownIcon } from './down.svg'; import Select, {components, InputActionMeta, InputProps} from 'react-select'; import { useManagerState } from "../../manager.context"; import { Query, QueryRevision } from "../queries"; import { Param, parseParams, stringifyParams } from "../parameters"; type option = { label: string, value: QueryRevision } const Input = (props: InputProps) => <components.Input {...props} isHidden={false} />; const height = 43; const MenuList = (props: any) => { const { options, children, maxHeight, getValue } = props; const [value] = getValue(); const initialOffset = options.indexOf(value) * height; return ( <List width={"100%"} height={maxHeight} itemCount={children.length} itemSize={height} initialScrollOffset={initialOffset} > {({ index, style }) => <div style={style}>{children[index]}</div>} </List> ) } type EditableSelectProps = { selectedQuery: QueryRevision | null, options: option[], params: Param[], onChange: (query: QueryRevision, params: Param[]) => void, } // Source: https://github.com/JedWatson/react-select/issues/1558#issuecomment-738880505 function EditableSelect(props: EditableSelectProps) { const defaultOption = props.selectedQuery ? { label: stringifyQueryRevision(props.selectedQuery), value: props.selectedQuery} : null; const [option, setOption] = useState<option | null>(defaultOption); const enabledParams = props.params.filter(p => p.enabled); const queryParams = stringifyParams(enabledParams); const defaultOptionLabel = defaultOption?.label || ""; const defaultInputValue = Boolean(queryParams) ? `${defaultOptionLabel}?${queryParams}` : defaultOptionLabel; const [inputValue, setInputValue] = useState(defaultInputValue); const [reflectParams, setReflectParams] = useState(true); useEffect(() => { if (!reflectParams) { return } if (option) { const newInputValue = Boolean(queryParams) ? `${option.label}?${queryParams}` : option.label; setInputValue(newInputValue); } else { const newInputValue = Boolean(queryParams) ? `?${queryParams}` : '' setInputValue(newInputValue); } }, [queryParams]); useEffect(() => { if (!reflectParams) { return } if (props.selectedQuery) { const label = stringifyQueryRevision(props.selectedQuery); setOption({ label: label, value: props.selectedQuery});<|fim▁hole|> const newInputValue = Boolean(queryParams) ? `${label}?${queryParams}` : label; setInputValue(newInputValue); } }, [props.selectedQuery]) const onInputChange = (inputValue: string, { action }: InputActionMeta) => { if (action !== "input-change") { return; } setInputValue(inputValue); let choosenOption; const [namespace, queryName, revision] = parseQueryRevision(inputValue); if (namespace && queryName && revision) { const newOption = props.options.find(o => o.value.namespace === namespace && o.value.name === queryName && o.value.revision === revision) if (newOption) { choosenOption = newOption; } } const originalParams = props.params; const inputParams = parseParams(inputValue); const updatedParams = mergeParams(originalParams, inputParams); const qr = choosenOption?.value || option?.value as QueryRevision; props.onChange(qr, updatedParams); }; const onChange = (option: option | null) => { if (!option) { return } setOption(option); const newInputValue = Boolean(queryParams) ? 
`${option.label}?${queryParams}` : option.label; setInputValue(newInputValue); props.onChange(option.value, props.params); }; const selectedNamespace = props.selectedQuery?.namespace || ""; const selectedQueryName = props.selectedQuery?.name || ""; return ( <Select {...props} className="query-controls__selector" classNamePrefix="query-controls__selector" value={option} inputValue={inputValue} onInputChange={onInputChange} onChange={onChange} onFocus={() => setReflectParams(false)} onBlur={() => setReflectParams(true)} controlShouldRenderValue={false} components={{Input, MenuList}} noOptionsMessage={disableNoOptionsMessage} filterOption={filterOption(selectedNamespace, selectedQueryName)} escapeClearsValue={true} backspaceRemovesValue={true} isClearable={true} placeholder="Queries..." /> ); } function mergeParams(original: Param[], input: Param[]): Param[] { let updatedParams = []; let updatedIndex = 0; for (let originalIndex = 0; originalIndex < original.length; originalIndex++) { const p = original[originalIndex]; if (!p.enabled) { updatedParams.push(p); continue } const inputedParam = input[updatedIndex]; // If the param was deleted on editing the input it may not find the index on the input params arrays if (inputedParam) { updatedParams.push(inputedParam); updatedIndex++; } } const newParams = input.slice(updatedIndex, input.length); updatedParams = updatedParams.concat(newParams); return updatedParams; } const filterOption = (selectedNamespace: string, selectedQuery: string) => (candidate: {label: string, value: string, data: any}, input: string): boolean => { if (!Boolean(input) && !Boolean(selectedNamespace) && !Boolean(selectedQuery)) { return false; } if (!Boolean(input) && Boolean(selectedNamespace) && Boolean(selectedQuery)) { const namespacedQuery = `${selectedNamespace}/${selectedQuery}` return candidate.label.includes(namespacedQuery); } const [namespace, queryName] = parseQueryRevision(input); if (namespace && queryName) { const namespacedQuery = `/${namespace}/${queryName}` return candidate.label.includes(namespacedQuery); } return candidate.label.includes(input); }; function stringifyQueryRevision(queryRevision: QueryRevision): string { return `/${queryRevision.namespace}/${queryRevision.name}/${queryRevision.revision}`; } const queryTargetRegex = /\/([^?/]*)/gm; function parseQueryRevision(input: string): [string, string, number] { let matches = input.match(queryTargetRegex) || []; matches = matches.map(s => s.replace('/', '')); return [matches[0] || "", matches[1] || "", parseInt(matches[2] || "") || 0]; } const disableNoOptionsMessage = () => null; type QueryControlsProps = { params: Param[], disableActions: {run: boolean, save: boolean, archive: boolean}, onChange: (query: QueryRevision, params: Param[]) => void, onRun: () => void, onSave: () => void, onMenuOpen: () => void, onArchiveQuery: () => void, onArchiveRevision: () => void, } function QueryControls(props: QueryControlsProps) { const {queries, archivedQueries, selectedQuery} = useManagerState(); let options = getOptions(queries); if (selectedQuery?.archived) { const archivedOptions = getOptions(archivedQueries); options = [...options, ...archivedOptions].sort(); } const [showArchiveActions, setShowArchiveActions] = useState(false); const handleAction = (handler: () => void) => () => { setShowArchiveActions(false); handler() } return ( <header className="query-controls--wrapper"> {selectedQuery?.archived && <p className="query-controls__archived-alert">This query is archived!</p>} <nav 
className="query-controls"> <div> <MenuIcon style={{cursor: 'pointer'}} onClick={props.onMenuOpen} className="query-controls__menu" /> </div> <div className="query-controls__selector--wrapper"> <p>{"/run-query"}</p> <EditableSelect options={options} selectedQuery={selectedQuery} params={props.params} onChange={props.onChange} /> </div> <div className="query-controls__actions--wrapper"> <button className="query-controls__actions__run" disabled={props.disableActions.run} onClick={props.onRun}>Run</button> <div className="query-controls__actions__mutation"> <div className="query-controls__actions__mutation__primary-actions"> <button className={"query-controls__actions__save " + (showArchiveActions ? "query-controls__actions__save--archive-open" : "")} disabled={props.disableActions.save} onClick={props.onSave}> Save </button> <button className={"query-controls__actions__mutation__menu " + (showArchiveActions ? "query-controls__actions__save--archive-open" : "")} disabled={props.disableActions.save} onClick={() => setShowArchiveActions(!showArchiveActions)} > <DownIcon className="query-controls__actions__mutation__menu__icon" /> </button> </div> {showArchiveActions && ( <ul className={"query-controls__actions__mutation__archive " + (props.disableActions.archive ? "query-controls__actions__mutation__archive--disabled" : "")}> <li onClick={handleAction(props.onArchiveRevision)}>Archive revision</li> <li onClick={handleAction(props.onArchiveQuery)}>Archive query</li> </ul> )} </div> </div> </nav> </header> ) } function getOptions(queries: Record<string, Query[]>): option[] { const options = Object.values(queries).flatMap(namespacedQueries => { return namespacedQueries.flatMap(q => { return q.revisions.map(r => { return buildOptionFromQuery(q, r); }) }) }); return options; } function buildOptionFromQuery(q: Query, rev: {revision: number, archived: boolean, text: string}): option { const qr = {name: q.name, namespace: q.namespace, revision: rev.revision, archived: rev.archived, text: rev.text}; return { value: qr, label: stringifyQueryRevision(qr) } } export default QueryControls<|fim▁end|>
<|file_name|>viewLoanGroup.js<|end_file_name|><|fim▁begin|>Behaviour.register({ '#backButton': function(button) {<|fim▁hole|> button.onclick = function() { if (isNaN(memberId) || memberId <= 0) { self.location = pathPrefix + "/searchLoanGroups"; } else { self.location = pathPrefix + "/memberLoanGroups?memberId=" + memberId; } } } });<|fim▁end|>
<|file_name|>load-as-root-component.ts<|end_file_name|><|fim▁begin|>import {Component, DynamicComponentLoader, ElementRef, Injector} from 'angular2/core'; import DynamicComponent from './dynamic-component'; import Hello from './hello'; @Component({ selector: 'load-as-root-component', directives: [ Hello ], template: `<|fim▁hole|> <button class="btn btn-warning" (click)="loadComponent()"> Load Component </button> <div id="anchor"></div> </div>` }) export default class LoadAsRootComponent { constructor( private dcl: DynamicComponentLoader, private elementRef: ElementRef, private injector: Injector) { } loadComponent() { this.dcl.loadAsRoot(DynamicComponent, '#anchor', this.injector) .then(componentRef => console.log('loadAsRoot', componentRef)); } }<|fim▁end|>
<div class="wrapper"> <h3>LoadAsRoot Component</h3>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Atomic counting semaphore that can help you control access to a common resource //! by multiple processes in a concurrent system. //! //! ## Features //! //! - Effectively lock-free* semantics //! - Provides RAII-style acquire/release API //! - Implements `Send`, `Sync` and `Clone` //! //! _* lock-free when not using the `shutdown` API_ extern crate parking_lot; use std::sync::Arc; use parking_lot::RwLock; mod raw; use raw::RawSemaphore; mod guard; pub use guard::SemaphoreGuard; mod shutdown;<|fim▁hole|> #[cfg(test)] mod tests; /// Result returned from `Semaphore::try_access`. pub type TryAccessResult<T> = Result<SemaphoreGuard<T>, TryAccessError>; #[derive(Copy, Clone, Debug, PartialEq)] /// Error indicating a failure to acquire access to the resource /// behind the semaphore. /// /// Returned from `Semaphore::try_access`. pub enum TryAccessError { /// This semaphore has shut down and will no longer grant access to the underlying resource. Shutdown, /// This semaphore has no more capacity to grant further access. /// Other access needs to be released before this semaphore can grant more. NoCapacity } /// Counting semaphore to control concurrent access to a common resource. pub struct Semaphore<T> { raw: Arc<RawSemaphore>, resource: Arc<RwLock<Option<Arc<T>>>> } impl<T> Clone for Semaphore<T> { fn clone(&self) -> Semaphore<T> { Semaphore { raw: self.raw.clone(), resource: self.resource.clone() } } } impl<T> Semaphore<T> { /// Create a new semaphore around a resource. /// /// The semaphore will limit the number of processes that can access /// the underlying resource at every point in time to the specified capacity. pub fn new(capacity: usize, resource: T) -> Self { Semaphore { raw: Arc::new(RawSemaphore::new(capacity)), resource: Arc::new(RwLock::new(Some(Arc::new(resource)))) } } #[inline] /// Attempt to access the underlying resource of this semaphore. /// /// This function will try to acquire access, and then return an RAII /// guard structure which will release the access when it falls out of scope. /// If the semaphore is out of capacity or shut down, a `TryAccessError` will be returned. pub fn try_access(&self) -> TryAccessResult<T> { if let Some(ref resource) = *self.resource.read() { if self.raw.try_acquire() { Ok(guard::new(&self.raw, resource)) } else { Err(TryAccessError::NoCapacity) } } else { Err(TryAccessError::Shutdown) } } /// Shut down the semaphore. /// /// This prevents any further access from being granted to the underlying resource. /// As soon as the last access is released and the returned handle goes out of scope, /// the resource will be dropped. /// /// Does _not_ block until the resource is no longer in use. If you would like to do that, /// you can call `wait` on the returned handle. pub fn shutdown(&self) -> ShutdownHandle<T> { shutdown::new(&self.raw, self.resource.write().take()) } }<|fim▁end|>
pub use shutdown::ShutdownHandle;
<|file_name|>weather.js<|end_file_name|><|fim▁begin|><|fim▁hole|> classNames: 'weather-view dashboard-item' });<|fim▁end|>
import Ember from 'ember'; export default Ember.View.extend({
<|file_name|>strings.py<|end_file_name|><|fim▁begin|>import os import re import codecs import subprocess import tempfile import shutil from .tylogger import logger DEFAULT_ENCODING = 'utf16' class Strings(object): def __init__(self, encoding=DEFAULT_ENCODING, aliases=None): self.encoding = encoding if encoding else DEFAULT_ENCODING self.__references = {} self.aliases = aliases if aliases else [] self.temp_dir = None def generate(self, files, dst): """generate strings :param dst: destination directory :param files: input files :return generate strings dicts """ dst_dir = os.path.abspath(dst) results = {} if self.temp_dir is None: logger.process('Generating Strings...') self.__generate_strings_temp_file(files) logger.done('Generated Strings') for filename in os.listdir(self.temp_dir): logger.debug('generated %s' % filename) reference = self.parsing(os.path.join(dst_dir, filename), encoding=self.encoding) self.__references[filename] = reference logger.done('Generated Reference') for k, v in self.__references.items(): logger.info('%s count: %d' % (k, len(v))) for basename, ref in self.__references.items(): target_abspath = os.path.join(dst_dir, basename) dirname = os.path.dirname(target_abspath) if not os.path.exists(dirname): os.makedirs(dirname) shutil.copy(os.path.join(self.temp_dir, basename), target_abspath) results[basename] = self.translate(target_abspath, ref, self.encoding) return results def __generate_strings_temp_file(self, source_files): """run `genstrings` script. generate `.strings` files to a temp directory. :param source_files: input files :return: temp directory """ script = 'genstrings' for filename in source_files: script += ' %s' % filename if len(self.aliases) > 0: script += ' -s' for alias in self.aliases: script += ' %s' % alias temp_dir = tempfile.mkdtemp() self.__run_script('%s -o %s' % (script, temp_dir)) self.temp_dir = temp_dir return temp_dir def __del__(self): if self.temp_dir: shutil.rmtree(self.temp_dir, ignore_errors=True) @staticmethod def __run_script(script): logger.debug('run: %s' % script) process = subprocess.Popen(script, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output = '' while process.poll() is None: line = process.stdout.readline() if line: output += line logger.debug(line.strip()) logger.finished(process.returncode) return process.returncode, output @staticmethod def parsing(filename, encoding=DEFAULT_ENCODING): """parsing `.strings` file. 
:param filename: .strings filename :param encoding: file encoding :return: reference """ reference = dict((elem[0], elem[1]) for elem in Strings.__reference_generator(filename, encoding)) return reference <|fim▁hole|> @staticmethod def parsing_elems(filename, encoding=DEFAULT_ENCODING): return list(Strings.__reference_generator(filename, encoding)) @staticmethod def __reference_generator(filename, encoding=DEFAULT_ENCODING): if os.path.exists(filename): line_end = [0] contents = '' with codecs.open(filename, mode='r', encoding=encoding if encoding else DEFAULT_ENCODING) as f: for line in f.readlines(): contents += line line_end.append(len(contents)) prog = re.compile(r"\s*\"(?P<key>.*?)\"\s*=\s*\"(?P<value>[\s\S]*?)\"\s*;", re.MULTILINE) for match in prog.finditer(contents): key = match.group('key') key_start = match.start('key') value = match.group('value') match.groupdict() line_no = next(i for i in range(len(line_end)) if line_end[i] > key_start) yield (key, value, line_no) @property def generated_filenames(self): """generated strings files basenames e.g.: 'Localizable.strings' :return: strings filenames """ return self.__references.keys() @staticmethod def translate(dst, reference, encoding=DEFAULT_ENCODING): """translate strings file by reference :param dst: destination strings file :param reference: translation reference :param encoding: file encoding :return: result dict """ result = {} translated = [] try: f = codecs.open(dst, "r", DEFAULT_ENCODING) lines = f.readlines() for (index, line) in enumerate(lines): match = re.match(r'"(?P<key>.*?)" = "(?P<value>.*?)";', line) if match is not None: key = match.group('key') value = match.group('value') answer = reference.get(key, None) if answer is not None: if reference[key] != value: line = '"%s" = "%s";\n' % (key, answer) lines[index] = line translated.append(key) result[key] = answer else: result[key] = value f.close() logger.done('Translated: %s' % dst) logger.info('count: %d' % len(translated)) for k in translated: logger.debug('%s => %s' % (k, result[k])) f = codecs.open(dst, "w+", encoding=encoding) f.writelines(lines) f.flush() f.close() return result # logger.addition('Write strings file to: %s' % self.filename) except Exception as e: logger.error(e)<|fim▁end|>
<|file_name|>treeAndTable.js<|end_file_name|><|fim▁begin|>/** * @author: * @date: 2016/1/21 */ define(["core/js/layout/Panel"], function (Panel) { var view = Panel.extend({ /*Panel的配置项 start*/ title:"表单-", help:"内容", brief:"摘要", /*Panel 配置 End*/ oninitialized:function(triggerEvent){ this._super(); this.mainRegion={ comXtype:$Component.TREE, comConf:{ data:[this.getModuleTree("core/js/base/AbstractView")], } }; var that = this; this.footerRegion = { comXtype: $Component.TOOLSTRIP, comConf: { /*Panel的配置项 start*/ textAlign: $TextAlign.RIGHT, items: [{ text: "展开所有节点", onclick: function (e) { that.getMainRegionRef().getComRef().expandAll(); }, },{ text: "折叠所有节点", onclick: function (e) { that.getMainRegionRef().getComRef().collapseAll(); }, }] /*Panel 配置 End*/ } }; }, getModuleTree:function(moduleName,arr){ var va = window.rtree.tree[moduleName];<|fim▁hole|> var tree = {title: moduleName}; if(!arr){ arr = []; }else{ if(_.contains(arr,moduleName)){ return false; } } arr.push(moduleName); if(va&&va.deps&&va.deps.length>0){ tree.children = []; tree.folder=true; for(var i=0;i<va.deps.length;i++){ var newTree = this.getModuleTree(va.deps[i],arr); if(newTree){ tree.children.push(newTree); }else{ if(!tree.count){ tree.count = 0; } tree.count++; } } } return tree; } }); return view; });<|fim▁end|>
<|file_name|>test_rand.py<|end_file_name|><|fim▁begin|># Copyright (c) Frederick Dean # See LICENSE for details. """ Unit tests for :py:obj:`OpenSSL.rand`. """ from unittest import main import os import stat from OpenSSL.test.util import TestCase, b from OpenSSL import rand class RandTests(TestCase): def test_bytes_wrong_args(self): """ :py:obj:`OpenSSL.rand.bytes` raises :py:obj:`TypeError` if called with the wrong number of arguments or with a non-:py:obj:`int` argument. """ self.assertRaises(TypeError, rand.bytes) self.assertRaises(TypeError, rand.bytes, None) self.assertRaises(TypeError, rand.bytes, 3, None) # XXX Test failure of the malloc() in rand_bytes. def test_bytes(self): """ Verify that we can obtain bytes from rand_bytes() and that they are different each time. Test the parameter of rand_bytes() for bad values. """ b1 = rand.bytes(50) self.assertEqual(len(b1), 50) b2 = rand.bytes(num_bytes=50) # parameter by name self.assertNotEqual(b1, b2) # Hip, Hip, Horay! FIPS complaince b3 = rand.bytes(num_bytes=0) self.assertEqual(len(b3), 0) exc = self.assertRaises(ValueError, rand.bytes, -1) self.assertEqual(str(exc), "num_bytes must not be negative") def test_add_wrong_args(self): """ When called with the wrong number of arguments, or with arguments not of type :py:obj:`str` and :py:obj:`int`, :py:obj:`OpenSSL.rand.add` raises :py:obj:`TypeError`. """ self.assertRaises(TypeError, rand.add) self.assertRaises(TypeError, rand.add, b("foo"), None) self.assertRaises(TypeError, rand.add, None, 3) self.assertRaises(TypeError, rand.add, b("foo"), 3, None) def test_add(self): """ :py:obj:`OpenSSL.rand.add` adds entropy to the PRNG. """ rand.add(b('hamburger'), 3) def test_seed_wrong_args(self): """ When called with the wrong number of arguments, or with a non-:py:obj:`str` argument, :py:obj:`OpenSSL.rand.seed` raises :py:obj:`TypeError`. """ self.assertRaises(TypeError, rand.seed) self.assertRaises(TypeError, rand.seed, None) self.assertRaises(TypeError, rand.seed, b("foo"), None) def test_seed(self): """ :py:obj:`OpenSSL.rand.seed` adds entropy to the PRNG. """ rand.seed(b('milk shake')) def test_status_wrong_args(self): """ :py:obj:`OpenSSL.rand.status` raises :py:obj:`TypeError` when called with any arguments. """ self.assertRaises(TypeError, rand.status, None) <|fim▁hole|> def test_status(self): """ :py:obj:`OpenSSL.rand.status` returns :py:obj:`True` if the PRNG has sufficient entropy, :py:obj:`False` otherwise. """ # It's hard to know what it is actually going to return. Different # OpenSSL random engines decide differently whether they have enough # entropy or not. self.assertTrue(rand.status() in (1, 2)) def test_egd_wrong_args(self): """ :py:obj:`OpenSSL.rand.egd` raises :py:obj:`TypeError` when called with the wrong number of arguments or with arguments not of type :py:obj:`str` and :py:obj:`int`. """ self.assertRaises(TypeError, rand.egd) self.assertRaises(TypeError, rand.egd, None) self.assertRaises(TypeError, rand.egd, "foo", None) self.assertRaises(TypeError, rand.egd, None, 3) self.assertRaises(TypeError, rand.egd, "foo", 3, None) def test_egd_missing(self): """ :py:obj:`OpenSSL.rand.egd` returns :py:obj:`0` or :py:obj:`-1` if the EGD socket passed to it does not exist. """ result = rand.egd(self.mktemp()) expected = (-1, 0) self.assertTrue( result in expected, "%r not in %r" % (result, expected)) def test_cleanup_wrong_args(self): """ :py:obj:`OpenSSL.rand.cleanup` raises :py:obj:`TypeError` when called with any arguments. 
""" self.assertRaises(TypeError, rand.cleanup, None) def test_cleanup(self): """ :py:obj:`OpenSSL.rand.cleanup` releases the memory used by the PRNG and returns :py:obj:`None`. """ self.assertIdentical(rand.cleanup(), None) def test_load_file_wrong_args(self): """ :py:obj:`OpenSSL.rand.load_file` raises :py:obj:`TypeError` when called the wrong number of arguments or arguments not of type :py:obj:`str` and :py:obj:`int`. """ self.assertRaises(TypeError, rand.load_file) self.assertRaises(TypeError, rand.load_file, "foo", None) self.assertRaises(TypeError, rand.load_file, None, 1) self.assertRaises(TypeError, rand.load_file, "foo", 1, None) def test_write_file_wrong_args(self): """ :py:obj:`OpenSSL.rand.write_file` raises :py:obj:`TypeError` when called with the wrong number of arguments or a non-:py:obj:`str` argument. """ self.assertRaises(TypeError, rand.write_file) self.assertRaises(TypeError, rand.write_file, None) self.assertRaises(TypeError, rand.write_file, "foo", None) def test_files(self): """ Test reading and writing of files via rand functions. """ # Write random bytes to a file tmpfile = self.mktemp() # Make sure it exists (so cleanup definitely succeeds) fObj = open(tmpfile, 'w') fObj.close() try: rand.write_file(tmpfile) # Verify length of written file size = os.stat(tmpfile)[stat.ST_SIZE] self.assertEquals(size, 1024) # Read random bytes from file rand.load_file(tmpfile) rand.load_file(tmpfile, 4) # specify a length finally: # Cleanup os.unlink(tmpfile) if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>feature.pb.cc<|end_file_name|><|fim▁begin|>// Generated by the protocol buffer compiler. DO NOT EDIT! // source: diplomacy_tensorflow/core/example/feature.proto #include "diplomacy_tensorflow/core/example/feature.pb.h" #include <algorithm> #include <google/protobuf/stubs/common.h> #include <google/protobuf/stubs/port.h> #include <google/protobuf/io/coded_stream.h> #include <google/protobuf/wire_format_lite_inl.h> #include <google/protobuf/descriptor.h> #include <google/protobuf/generated_message_reflection.h> #include <google/protobuf/reflection_ops.h> #include <google/protobuf/wire_format.h> // This is a temporary google only hack #ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS #include "third_party/protobuf/version.h" #endif // @@protoc_insertion_point(includes) namespace protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto { extern PROTOBUF_INTERNAL_EXPORT_protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_BytesList; extern PROTOBUF_INTERNAL_EXPORT_protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_FloatList; extern PROTOBUF_INTERNAL_EXPORT_protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Int64List; extern PROTOBUF_INTERNAL_EXPORT_protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_FeatureList; extern PROTOBUF_INTERNAL_EXPORT_protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_FeatureLists_FeatureListEntry_DoNotUse; extern PROTOBUF_INTERNAL_EXPORT_protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_Features_FeatureEntry_DoNotUse; extern PROTOBUF_INTERNAL_EXPORT_protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_Feature; } // namespace protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto namespace diplomacy { namespace tensorflow { class BytesListDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<BytesList> _instance; } _BytesList_default_instance_; class FloatListDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<FloatList> _instance; } _FloatList_default_instance_; class Int64ListDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<Int64List> _instance; } _Int64List_default_instance_; class FeatureDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<Feature> _instance; const ::diplomacy::tensorflow::BytesList* bytes_list_; const ::diplomacy::tensorflow::FloatList* float_list_; const ::diplomacy::tensorflow::Int64List* int64_list_; } _Feature_default_instance_; class Features_FeatureEntry_DoNotUseDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<Features_FeatureEntry_DoNotUse> _instance; } _Features_FeatureEntry_DoNotUse_default_instance_; class FeaturesDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<Features> _instance; } _Features_default_instance_; class FeatureListDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<FeatureList> _instance; } _FeatureList_default_instance_; class FeatureLists_FeatureListEntry_DoNotUseDefaultTypeInternal { public: 
::google::protobuf::internal::ExplicitlyConstructed<FeatureLists_FeatureListEntry_DoNotUse> _instance; } _FeatureLists_FeatureListEntry_DoNotUse_default_instance_; class FeatureListsDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<FeatureLists> _instance; } _FeatureLists_default_instance_; } // namespace tensorflow } // namespace diplomacy namespace protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto { static void InitDefaultsBytesList() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_BytesList_default_instance_; new (ptr) ::diplomacy::tensorflow::BytesList(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::diplomacy::tensorflow::BytesList::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<0> scc_info_BytesList = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsBytesList}, {}}; static void InitDefaultsFloatList() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_FloatList_default_instance_; new (ptr) ::diplomacy::tensorflow::FloatList(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::diplomacy::tensorflow::FloatList::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<0> scc_info_FloatList = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsFloatList}, {}}; static void InitDefaultsInt64List() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_Int64List_default_instance_; new (ptr) ::diplomacy::tensorflow::Int64List(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::diplomacy::tensorflow::Int64List::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<0> scc_info_Int64List = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsInt64List}, {}}; static void InitDefaultsFeature() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_Feature_default_instance_; new (ptr) ::diplomacy::tensorflow::Feature(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::diplomacy::tensorflow::Feature::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<3> scc_info_Feature = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsFeature}, { &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_BytesList.base, &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FloatList.base, &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Int64List.base,}}; static void InitDefaultsFeatures_FeatureEntry_DoNotUse() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_Features_FeatureEntry_DoNotUse_default_instance_; new (ptr) ::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse(); } ::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<1> scc_info_Features_FeatureEntry_DoNotUse = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsFeatures_FeatureEntry_DoNotUse}, { &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Feature.base,}}; static void InitDefaultsFeatures() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_Features_default_instance_; new (ptr) ::diplomacy::tensorflow::Features(); 
::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::diplomacy::tensorflow::Features::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<1> scc_info_Features = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsFeatures}, { &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Features_FeatureEntry_DoNotUse.base,}}; static void InitDefaultsFeatureList() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_FeatureList_default_instance_; new (ptr) ::diplomacy::tensorflow::FeatureList(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::diplomacy::tensorflow::FeatureList::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<1> scc_info_FeatureList = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsFeatureList}, { &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Feature.base,}}; static void InitDefaultsFeatureLists_FeatureListEntry_DoNotUse() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_FeatureLists_FeatureListEntry_DoNotUse_default_instance_; new (ptr) ::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse(); } ::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<1> scc_info_FeatureLists_FeatureListEntry_DoNotUse = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsFeatureLists_FeatureListEntry_DoNotUse}, { &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FeatureList.base,}}; static void InitDefaultsFeatureLists() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::diplomacy::tensorflow::_FeatureLists_default_instance_; new (ptr) ::diplomacy::tensorflow::FeatureLists(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::diplomacy::tensorflow::FeatureLists::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<1> scc_info_FeatureLists = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsFeatureLists}, { &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FeatureLists_FeatureListEntry_DoNotUse.base,}}; void InitDefaults() { ::google::protobuf::internal::InitSCC(&scc_info_BytesList.base); ::google::protobuf::internal::InitSCC(&scc_info_FloatList.base); ::google::protobuf::internal::InitSCC(&scc_info_Int64List.base); ::google::protobuf::internal::InitSCC(&scc_info_Feature.base); ::google::protobuf::internal::InitSCC(&scc_info_Features_FeatureEntry_DoNotUse.base); ::google::protobuf::internal::InitSCC(&scc_info_Features.base); ::google::protobuf::internal::InitSCC(&scc_info_FeatureList.base); ::google::protobuf::internal::InitSCC(&scc_info_FeatureLists_FeatureListEntry_DoNotUse.base); ::google::protobuf::internal::InitSCC(&scc_info_FeatureLists.base); } ::google::protobuf::Metadata file_level_metadata[9]; const ::google::protobuf::uint32 TableStruct::offsets[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::BytesList, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::BytesList, value_), ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FloatList, 
_internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FloatList, value_), ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Int64List, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Int64List, value_), ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Feature, _internal_metadata_), ~0u, // no _extensions_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Feature, _oneof_case_[0]), ~0u, // no _weak_field_map_ offsetof(::diplomacy::tensorflow::FeatureDefaultTypeInternal, bytes_list_), offsetof(::diplomacy::tensorflow::FeatureDefaultTypeInternal, float_list_), offsetof(::diplomacy::tensorflow::FeatureDefaultTypeInternal, int64_list_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Feature, kind_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse, _has_bits_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse, key_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse, value_), 0, 1, ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Features, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::Features, feature_), ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FeatureList, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FeatureList, feature_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse, _has_bits_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse, key_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse, value_), 0, 1, ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FeatureLists, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::diplomacy::tensorflow::FeatureLists, feature_list_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::diplomacy::tensorflow::BytesList)}, { 6, -1, sizeof(::diplomacy::tensorflow::FloatList)}, { 12, -1, sizeof(::diplomacy::tensorflow::Int64List)}, { 18, -1, sizeof(::diplomacy::tensorflow::Feature)}, { 27, 34, sizeof(::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse)}, { 36, -1, 
sizeof(::diplomacy::tensorflow::Features)}, { 42, -1, sizeof(::diplomacy::tensorflow::FeatureList)}, { 48, 55, sizeof(::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse)}, { 57, -1, sizeof(::diplomacy::tensorflow::FeatureLists)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_BytesList_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_FloatList_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_Int64List_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_Feature_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_Features_FeatureEntry_DoNotUse_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_Features_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_FeatureList_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_FeatureLists_FeatureListEntry_DoNotUse_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::diplomacy::tensorflow::_FeatureLists_default_instance_), }; void protobuf_AssignDescriptors() { AddDescriptors(); AssignDescriptors( "diplomacy_tensorflow/core/example/feature.proto", schemas, file_default_instances, TableStruct::offsets, file_level_metadata, NULL, NULL); } void protobuf_AssignDescriptorsOnce() { static ::google::protobuf::internal::once_flag once; ::google::protobuf::internal::call_once(once, protobuf_AssignDescriptors); } void protobuf_RegisterTypes(const ::std::string&) GOOGLE_PROTOBUF_ATTRIBUTE_COLD; void protobuf_RegisterTypes(const ::std::string&) { protobuf_AssignDescriptorsOnce(); ::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 9); } void AddDescriptorsImpl() { InitDefaults(); static const char descriptor[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { "\n/diplomacy_tensorflow/core/example/feat" "ure.proto\022\024diplomacy.tensorflow\"\032\n\tBytes" "List\022\r\n\005value\030\001 \003(\014\"\036\n\tFloatList\022\021\n\005valu" "e\030\001 \003(\002B\002\020\001\"\036\n\tInt64List\022\021\n\005value\030\001 \003(\003B" "\002\020\001\"\266\001\n\007Feature\0225\n\nbytes_list\030\001 \001(\0132\037.di" "plomacy.tensorflow.BytesListH\000\0225\n\nfloat_" "list\030\002 \001(\0132\037.diplomacy.tensorflow.FloatL" "istH\000\0225\n\nint64_list\030\003 \001(\0132\037.diplomacy.te" "nsorflow.Int64ListH\000B\006\n\004kind\"\227\001\n\010Feature" "s\022<\n\007feature\030\001 \003(\0132+.diplomacy.tensorflo" "w.Features.FeatureEntry\032M\n\014FeatureEntry\022" "\013\n\003key\030\001 \001(\t\022,\n\005value\030\002 \001(\0132\035.diplomacy." 
"tensorflow.Feature:\0028\001\"=\n\013FeatureList\022.\n" "\007feature\030\001 \003(\0132\035.diplomacy.tensorflow.Fe" "ature\"\260\001\n\014FeatureLists\022I\n\014feature_list\030\001" " \003(\01323.diplomacy.tensorflow.FeatureLists" ".FeatureListEntry\032U\n\020FeatureListEntry\022\013\n" "\003key\030\001 \001(\t\0220\n\005value\030\002 \001(\0132!.diplomacy.te" "nsorflow.FeatureList:\0028\001Bi\n\026org.tensorfl" "ow.exampleB\rFeatureProtosP\001Z;github.com/" "tensorflow/tensorflow/tensorflow/go/core" "/example\370\001\001b\006proto3" }; ::google::protobuf::DescriptorPool::InternalAddGeneratedFile( descriptor, 859); ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile( "diplomacy_tensorflow/core/example/feature.proto", &protobuf_RegisterTypes); } void AddDescriptors() { static ::google::protobuf::internal::once_flag once; ::google::protobuf::internal::call_once(once, AddDescriptorsImpl); } // Force AddDescriptors() to be called at dynamic initialization time. struct StaticDescriptorInitializer { StaticDescriptorInitializer() { AddDescriptors(); } } static_descriptor_initializer; } // namespace protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto namespace diplomacy { namespace tensorflow { // =================================================================== void BytesList::InitAsDefaultInstance() { } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int BytesList::kValueFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 BytesList::BytesList() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_BytesList.base); SharedCtor(); // @@protoc_insertion_point(constructor:diplomacy.tensorflow.BytesList) } BytesList::BytesList(::google::protobuf::Arena* arena) : ::google::protobuf::Message(), _internal_metadata_(arena), value_(arena) { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_BytesList.base); SharedCtor(); RegisterArenaDtor(arena); // @@protoc_insertion_point(arena_constructor:diplomacy.tensorflow.BytesList) } BytesList::BytesList(const BytesList& from) : ::google::protobuf::Message(), _internal_metadata_(NULL), value_(from.value_) { _internal_metadata_.MergeFrom(from._internal_metadata_); // @@protoc_insertion_point(copy_constructor:diplomacy.tensorflow.BytesList) } void BytesList::SharedCtor() { } BytesList::~BytesList() { // @@protoc_insertion_point(destructor:diplomacy.tensorflow.BytesList) SharedDtor(); } void BytesList::SharedDtor() { GOOGLE_DCHECK(GetArenaNoVirtual() == NULL); } void BytesList::ArenaDtor(void* object) { BytesList* _this = reinterpret_cast< BytesList* >(object); (void)_this; } void BytesList::RegisterArenaDtor(::google::protobuf::Arena* arena) { } void BytesList::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* BytesList::descriptor() { ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const BytesList& BytesList::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_BytesList.base); return *internal_default_instance(); } void BytesList::Clear() { // @@protoc_insertion_point(message_clear_start:diplomacy.tensorflow.BytesList) 
::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; value_.Clear(); _internal_metadata_.Clear(); } bool BytesList::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:diplomacy.tensorflow.BytesList) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // repeated bytes value = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadBytes( input, this->add_value())); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:diplomacy.tensorflow.BytesList) return true; failure: // @@protoc_insertion_point(parse_failure:diplomacy.tensorflow.BytesList) return false; #undef DO_ } void BytesList::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:diplomacy.tensorflow.BytesList) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated bytes value = 1; for (int i = 0, n = this->value_size(); i < n; i++) { ::google::protobuf::internal::WireFormatLite::WriteBytes( 1, this->value(i), output); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:diplomacy.tensorflow.BytesList) } ::google::protobuf::uint8* BytesList::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:diplomacy.tensorflow.BytesList) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated bytes value = 1; for (int i = 0, n = this->value_size(); i < n; i++) { target = ::google::protobuf::internal::WireFormatLite:: WriteBytesToArray(1, this->value(i), target); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:diplomacy.tensorflow.BytesList) return target; } size_t BytesList::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:diplomacy.tensorflow.BytesList) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // repeated bytes value = 1; total_size += 1 * ::google::protobuf::internal::FromIntSize(this->value_size()); for (int i = 0, n = this->value_size(); i < n; i++) { total_size += ::google::protobuf::internal::WireFormatLite::BytesSize( this->value(i)); } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void BytesList::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:diplomacy.tensorflow.BytesList) GOOGLE_DCHECK_NE(&from, this); const BytesList* source = ::google::protobuf::internal::DynamicCastToGenerated<const BytesList>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:diplomacy.tensorflow.BytesList) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:diplomacy.tensorflow.BytesList) MergeFrom(*source); } } void BytesList::MergeFrom(const BytesList& from) { // @@protoc_insertion_point(class_specific_merge_from_start:diplomacy.tensorflow.BytesList) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; value_.MergeFrom(from.value_); } void BytesList::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:diplomacy.tensorflow.BytesList) if (&from == this) return; Clear(); MergeFrom(from); } void BytesList::CopyFrom(const BytesList& from) { // @@protoc_insertion_point(class_specific_copy_from_start:diplomacy.tensorflow.BytesList) if (&from == this) return; Clear(); MergeFrom(from); } bool BytesList::IsInitialized() const { return true; } void BytesList::Swap(BytesList* other) { if (other == this) return; if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) { InternalSwap(other); } else { BytesList* temp = New(GetArenaNoVirtual()); temp->MergeFrom(*other); other->CopyFrom(*this); InternalSwap(temp); if (GetArenaNoVirtual() == NULL) { delete temp; } } } void BytesList::UnsafeArenaSwap(BytesList* other) { if (other == this) return; GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual()); InternalSwap(other); } void BytesList::InternalSwap(BytesList* other) { using std::swap; value_.InternalSwap(CastToBase(&other->value_)); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata BytesList::GetMetadata() const { protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages]; } // =================================================================== void FloatList::InitAsDefaultInstance() { } #if !defined(_MSC_VER) 
|| _MSC_VER >= 1900 const int FloatList::kValueFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 FloatList::FloatList() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FloatList.base); SharedCtor(); // @@protoc_insertion_point(constructor:diplomacy.tensorflow.FloatList) } FloatList::FloatList(::google::protobuf::Arena* arena) : ::google::protobuf::Message(), _internal_metadata_(arena), value_(arena) { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FloatList.base); SharedCtor(); RegisterArenaDtor(arena); // @@protoc_insertion_point(arena_constructor:diplomacy.tensorflow.FloatList) } FloatList::FloatList(const FloatList& from) : ::google::protobuf::Message(), _internal_metadata_(NULL), value_(from.value_) { _internal_metadata_.MergeFrom(from._internal_metadata_); // @@protoc_insertion_point(copy_constructor:diplomacy.tensorflow.FloatList) } void FloatList::SharedCtor() { } FloatList::~FloatList() { // @@protoc_insertion_point(destructor:diplomacy.tensorflow.FloatList) SharedDtor(); } void FloatList::SharedDtor() { GOOGLE_DCHECK(GetArenaNoVirtual() == NULL); } void FloatList::ArenaDtor(void* object) { FloatList* _this = reinterpret_cast< FloatList* >(object); (void)_this; } void FloatList::RegisterArenaDtor(::google::protobuf::Arena* arena) { } void FloatList::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* FloatList::descriptor() { ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const FloatList& FloatList::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FloatList.base); return *internal_default_instance(); } void FloatList::Clear() { // @@protoc_insertion_point(message_clear_start:diplomacy.tensorflow.FloatList) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; value_.Clear(); _internal_metadata_.Clear(); } bool FloatList::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:diplomacy.tensorflow.FloatList) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // repeated float value = 1 [packed = true]; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { DO_((::google::protobuf::internal::WireFormatLite::ReadPackedPrimitive< float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( input, this->mutable_value()))); } else if ( static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(13u /* 13 & 0xFF */)) { DO_((::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitiveNoInline< float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>( 1, 10u, input, this->mutable_value()))); } else { goto handle_unusual; } 
break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:diplomacy.tensorflow.FloatList) return true; failure: // @@protoc_insertion_point(parse_failure:diplomacy.tensorflow.FloatList) return false; #undef DO_ } void FloatList::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:diplomacy.tensorflow.FloatList) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated float value = 1 [packed = true]; if (this->value_size() > 0) { ::google::protobuf::internal::WireFormatLite::WriteTag(1, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output); output->WriteVarint32(static_cast< ::google::protobuf::uint32>( _value_cached_byte_size_)); ::google::protobuf::internal::WireFormatLite::WriteFloatArray( this->value().data(), this->value_size(), output); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:diplomacy.tensorflow.FloatList) } ::google::protobuf::uint8* FloatList::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:diplomacy.tensorflow.FloatList) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated float value = 1 [packed = true]; if (this->value_size() > 0) { target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray( 1, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, target); target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray( static_cast< ::google::protobuf::int32>( _value_cached_byte_size_), target); target = ::google::protobuf::internal::WireFormatLite:: WriteFloatNoTagToArray(this->value_, target); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:diplomacy.tensorflow.FloatList) return target; } size_t FloatList::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:diplomacy.tensorflow.FloatList) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // repeated float value = 1 [packed = true]; { unsigned int count = static_cast<unsigned int>(this->value_size()); size_t data_size = 4UL * count; if (data_size > 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size( static_cast< ::google::protobuf::int32>(data_size)); } int cached_size = ::google::protobuf::internal::ToCachedSize(data_size); GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); _value_cached_byte_size_ = cached_size; GOOGLE_SAFE_CONCURRENT_WRITES_END(); total_size += data_size; } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void FloatList::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:diplomacy.tensorflow.FloatList) GOOGLE_DCHECK_NE(&from, this); const FloatList* source = ::google::protobuf::internal::DynamicCastToGenerated<const FloatList>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:diplomacy.tensorflow.FloatList) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:diplomacy.tensorflow.FloatList) MergeFrom(*source); } } void FloatList::MergeFrom(const FloatList& from) { // @@protoc_insertion_point(class_specific_merge_from_start:diplomacy.tensorflow.FloatList) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; value_.MergeFrom(from.value_); } void FloatList::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:diplomacy.tensorflow.FloatList) if (&from == this) return; Clear(); MergeFrom(from); } void FloatList::CopyFrom(const FloatList& from) { // @@protoc_insertion_point(class_specific_copy_from_start:diplomacy.tensorflow.FloatList) if (&from == this) return; Clear(); MergeFrom(from); } bool FloatList::IsInitialized() const { return true; } void FloatList::Swap(FloatList* other) { if (other == this) return; if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) { InternalSwap(other); } else { FloatList* temp = New(GetArenaNoVirtual()); temp->MergeFrom(*other); other->CopyFrom(*this); InternalSwap(temp); if (GetArenaNoVirtual() == NULL) { delete temp; } } } void FloatList::UnsafeArenaSwap(FloatList* other) {<|fim▁hole|>} void FloatList::InternalSwap(FloatList* other) { using std::swap; value_.InternalSwap(&other->value_); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata FloatList::GetMetadata() const { protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages]; } // =================================================================== void Int64List::InitAsDefaultInstance() { } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int Int64List::kValueFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 Int64List::Int64List() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Int64List.base); SharedCtor(); // @@protoc_insertion_point(constructor:diplomacy.tensorflow.Int64List) } Int64List::Int64List(::google::protobuf::Arena* 
arena) : ::google::protobuf::Message(), _internal_metadata_(arena), value_(arena) { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Int64List.base); SharedCtor(); RegisterArenaDtor(arena); // @@protoc_insertion_point(arena_constructor:diplomacy.tensorflow.Int64List) } Int64List::Int64List(const Int64List& from) : ::google::protobuf::Message(), _internal_metadata_(NULL), value_(from.value_) { _internal_metadata_.MergeFrom(from._internal_metadata_); // @@protoc_insertion_point(copy_constructor:diplomacy.tensorflow.Int64List) } void Int64List::SharedCtor() { } Int64List::~Int64List() { // @@protoc_insertion_point(destructor:diplomacy.tensorflow.Int64List) SharedDtor(); } void Int64List::SharedDtor() { GOOGLE_DCHECK(GetArenaNoVirtual() == NULL); } void Int64List::ArenaDtor(void* object) { Int64List* _this = reinterpret_cast< Int64List* >(object); (void)_this; } void Int64List::RegisterArenaDtor(::google::protobuf::Arena* arena) { } void Int64List::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* Int64List::descriptor() { ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const Int64List& Int64List::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Int64List.base); return *internal_default_instance(); } void Int64List::Clear() { // @@protoc_insertion_point(message_clear_start:diplomacy.tensorflow.Int64List) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; value_.Clear(); _internal_metadata_.Clear(); } bool Int64List::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:diplomacy.tensorflow.Int64List) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // repeated int64 value = 1 [packed = true]; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { DO_((::google::protobuf::internal::WireFormatLite::ReadPackedPrimitive< ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>( input, this->mutable_value()))); } else if ( static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(8u /* 8 & 0xFF */)) { DO_((::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitiveNoInline< ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>( 1, 10u, input, this->mutable_value()))); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:diplomacy.tensorflow.Int64List) return true; failure: // @@protoc_insertion_point(parse_failure:diplomacy.tensorflow.Int64List) return false; #undef DO_ } void 
Int64List::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:diplomacy.tensorflow.Int64List) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated int64 value = 1 [packed = true]; if (this->value_size() > 0) { ::google::protobuf::internal::WireFormatLite::WriteTag(1, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output); output->WriteVarint32(static_cast< ::google::protobuf::uint32>( _value_cached_byte_size_)); } for (int i = 0, n = this->value_size(); i < n; i++) { ::google::protobuf::internal::WireFormatLite::WriteInt64NoTag( this->value(i), output); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:diplomacy.tensorflow.Int64List) } ::google::protobuf::uint8* Int64List::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:diplomacy.tensorflow.Int64List) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated int64 value = 1 [packed = true]; if (this->value_size() > 0) { target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray( 1, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, target); target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray( static_cast< ::google::protobuf::int32>( _value_cached_byte_size_), target); target = ::google::protobuf::internal::WireFormatLite:: WriteInt64NoTagToArray(this->value_, target); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:diplomacy.tensorflow.Int64List) return target; } size_t Int64List::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:diplomacy.tensorflow.Int64List) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // repeated int64 value = 1 [packed = true]; { size_t data_size = ::google::protobuf::internal::WireFormatLite:: Int64Size(this->value_); if (data_size > 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::Int32Size( static_cast< ::google::protobuf::int32>(data_size)); } int cached_size = ::google::protobuf::internal::ToCachedSize(data_size); GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); _value_cached_byte_size_ = cached_size; GOOGLE_SAFE_CONCURRENT_WRITES_END(); total_size += data_size; } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void Int64List::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:diplomacy.tensorflow.Int64List) GOOGLE_DCHECK_NE(&from, this); const Int64List* source = ::google::protobuf::internal::DynamicCastToGenerated<const Int64List>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:diplomacy.tensorflow.Int64List) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:diplomacy.tensorflow.Int64List) MergeFrom(*source); } } void Int64List::MergeFrom(const Int64List& from) { // @@protoc_insertion_point(class_specific_merge_from_start:diplomacy.tensorflow.Int64List) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; value_.MergeFrom(from.value_); } void Int64List::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:diplomacy.tensorflow.Int64List) if (&from == this) return; Clear(); MergeFrom(from); } void Int64List::CopyFrom(const Int64List& from) { // @@protoc_insertion_point(class_specific_copy_from_start:diplomacy.tensorflow.Int64List) if (&from == this) return; Clear(); MergeFrom(from); } bool Int64List::IsInitialized() const { return true; } void Int64List::Swap(Int64List* other) { if (other == this) return; if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) { InternalSwap(other); } else { Int64List* temp = New(GetArenaNoVirtual()); temp->MergeFrom(*other); other->CopyFrom(*this); InternalSwap(temp); if (GetArenaNoVirtual() == NULL) { delete temp; } } } void Int64List::UnsafeArenaSwap(Int64List* other) { if (other == this) return; GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual()); InternalSwap(other); } void Int64List::InternalSwap(Int64List* other) { using std::swap; value_.InternalSwap(&other->value_); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata Int64List::GetMetadata() const { protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages]; } // =================================================================== void Feature::InitAsDefaultInstance() { ::diplomacy::tensorflow::_Feature_default_instance_.bytes_list_ = const_cast< ::diplomacy::tensorflow::BytesList*>( ::diplomacy::tensorflow::BytesList::internal_default_instance()); ::diplomacy::tensorflow::_Feature_default_instance_.float_list_ = const_cast< ::diplomacy::tensorflow::FloatList*>( ::diplomacy::tensorflow::FloatList::internal_default_instance()); 
::diplomacy::tensorflow::_Feature_default_instance_.int64_list_ = const_cast< ::diplomacy::tensorflow::Int64List*>( ::diplomacy::tensorflow::Int64List::internal_default_instance()); } void Feature::set_allocated_bytes_list(::diplomacy::tensorflow::BytesList* bytes_list) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); clear_kind(); if (bytes_list) { ::google::protobuf::Arena* submessage_arena = ::google::protobuf::Arena::GetArena(bytes_list); if (message_arena != submessage_arena) { bytes_list = ::google::protobuf::internal::GetOwnedMessage( message_arena, bytes_list, submessage_arena); } set_has_bytes_list(); kind_.bytes_list_ = bytes_list; } // @@protoc_insertion_point(field_set_allocated:diplomacy.tensorflow.Feature.bytes_list) } void Feature::set_allocated_float_list(::diplomacy::tensorflow::FloatList* float_list) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); clear_kind(); if (float_list) { ::google::protobuf::Arena* submessage_arena = ::google::protobuf::Arena::GetArena(float_list); if (message_arena != submessage_arena) { float_list = ::google::protobuf::internal::GetOwnedMessage( message_arena, float_list, submessage_arena); } set_has_float_list(); kind_.float_list_ = float_list; } // @@protoc_insertion_point(field_set_allocated:diplomacy.tensorflow.Feature.float_list) } void Feature::set_allocated_int64_list(::diplomacy::tensorflow::Int64List* int64_list) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); clear_kind(); if (int64_list) { ::google::protobuf::Arena* submessage_arena = ::google::protobuf::Arena::GetArena(int64_list); if (message_arena != submessage_arena) { int64_list = ::google::protobuf::internal::GetOwnedMessage( message_arena, int64_list, submessage_arena); } set_has_int64_list(); kind_.int64_list_ = int64_list; } // @@protoc_insertion_point(field_set_allocated:diplomacy.tensorflow.Feature.int64_list) } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int Feature::kBytesListFieldNumber; const int Feature::kFloatListFieldNumber; const int Feature::kInt64ListFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 Feature::Feature() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Feature.base); SharedCtor(); // @@protoc_insertion_point(constructor:diplomacy.tensorflow.Feature) } Feature::Feature(::google::protobuf::Arena* arena) : ::google::protobuf::Message(), _internal_metadata_(arena) { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Feature.base); SharedCtor(); RegisterArenaDtor(arena); // @@protoc_insertion_point(arena_constructor:diplomacy.tensorflow.Feature) } Feature::Feature(const Feature& from) : ::google::protobuf::Message(), _internal_metadata_(NULL) { _internal_metadata_.MergeFrom(from._internal_metadata_); clear_has_kind(); switch (from.kind_case()) { case kBytesList: { mutable_bytes_list()->::diplomacy::tensorflow::BytesList::MergeFrom(from.bytes_list()); break; } case kFloatList: { mutable_float_list()->::diplomacy::tensorflow::FloatList::MergeFrom(from.float_list()); break; } case kInt64List: { mutable_int64_list()->::diplomacy::tensorflow::Int64List::MergeFrom(from.int64_list()); break; } case KIND_NOT_SET: { break; } } // @@protoc_insertion_point(copy_constructor:diplomacy.tensorflow.Feature) } void Feature::SharedCtor() { clear_has_kind(); } Feature::~Feature() { // 
@@protoc_insertion_point(destructor:diplomacy.tensorflow.Feature) SharedDtor(); } void Feature::SharedDtor() { GOOGLE_DCHECK(GetArenaNoVirtual() == NULL); if (has_kind()) { clear_kind(); } } void Feature::ArenaDtor(void* object) { Feature* _this = reinterpret_cast< Feature* >(object); (void)_this; } void Feature::RegisterArenaDtor(::google::protobuf::Arena* arena) { } void Feature::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* Feature::descriptor() { ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const Feature& Feature::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Feature.base); return *internal_default_instance(); } void Feature::clear_kind() { // @@protoc_insertion_point(one_of_clear_start:diplomacy.tensorflow.Feature) switch (kind_case()) { case kBytesList: { if (GetArenaNoVirtual() == NULL) { delete kind_.bytes_list_; } break; } case kFloatList: { if (GetArenaNoVirtual() == NULL) { delete kind_.float_list_; } break; } case kInt64List: { if (GetArenaNoVirtual() == NULL) { delete kind_.int64_list_; } break; } case KIND_NOT_SET: { break; } } _oneof_case_[0] = KIND_NOT_SET; } void Feature::Clear() { // @@protoc_insertion_point(message_clear_start:diplomacy.tensorflow.Feature) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; clear_kind(); _internal_metadata_.Clear(); } bool Feature::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:diplomacy.tensorflow.Feature) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // .diplomacy.tensorflow.BytesList bytes_list = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, mutable_bytes_list())); } else { goto handle_unusual; } break; } // .diplomacy.tensorflow.FloatList float_list = 2; case 2: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(18u /* 18 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, mutable_float_list())); } else { goto handle_unusual; } break; } // .diplomacy.tensorflow.Int64List int64_list = 3; case 3: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(26u /* 26 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, mutable_int64_list())); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:diplomacy.tensorflow.Feature) return true; failure: // @@protoc_insertion_point(parse_failure:diplomacy.tensorflow.Feature) return false; #undef DO_ } void 
Feature::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:diplomacy.tensorflow.Feature) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // .diplomacy.tensorflow.BytesList bytes_list = 1; if (has_bytes_list()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 1, this->_internal_bytes_list(), output); } // .diplomacy.tensorflow.FloatList float_list = 2; if (has_float_list()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 2, this->_internal_float_list(), output); } // .diplomacy.tensorflow.Int64List int64_list = 3; if (has_int64_list()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 3, this->_internal_int64_list(), output); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:diplomacy.tensorflow.Feature) } ::google::protobuf::uint8* Feature::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:diplomacy.tensorflow.Feature) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // .diplomacy.tensorflow.BytesList bytes_list = 1; if (has_bytes_list()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( 1, this->_internal_bytes_list(), deterministic, target); } // .diplomacy.tensorflow.FloatList float_list = 2; if (has_float_list()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( 2, this->_internal_float_list(), deterministic, target); } // .diplomacy.tensorflow.Int64List int64_list = 3; if (has_int64_list()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( 3, this->_internal_int64_list(), deterministic, target); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:diplomacy.tensorflow.Feature) return target; } size_t Feature::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:diplomacy.tensorflow.Feature) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } switch (kind_case()) { // .diplomacy.tensorflow.BytesList bytes_list = 1; case kBytesList: { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *kind_.bytes_list_); break; } // .diplomacy.tensorflow.FloatList float_list = 2; case kFloatList: { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *kind_.float_list_); break; } // .diplomacy.tensorflow.Int64List int64_list = 3; case kInt64List: { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *kind_.int64_list_); break; } case KIND_NOT_SET: { break; } } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void Feature::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:diplomacy.tensorflow.Feature) GOOGLE_DCHECK_NE(&from, this); const Feature* source = ::google::protobuf::internal::DynamicCastToGenerated<const Feature>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:diplomacy.tensorflow.Feature) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:diplomacy.tensorflow.Feature) MergeFrom(*source); } } void Feature::MergeFrom(const Feature& from) { // @@protoc_insertion_point(class_specific_merge_from_start:diplomacy.tensorflow.Feature) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; switch (from.kind_case()) { case kBytesList: { mutable_bytes_list()->::diplomacy::tensorflow::BytesList::MergeFrom(from.bytes_list()); break; } case kFloatList: { mutable_float_list()->::diplomacy::tensorflow::FloatList::MergeFrom(from.float_list()); break; } case kInt64List: { mutable_int64_list()->::diplomacy::tensorflow::Int64List::MergeFrom(from.int64_list()); break; } case KIND_NOT_SET: { break; } } } void Feature::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:diplomacy.tensorflow.Feature) if (&from == this) return; Clear(); MergeFrom(from); } void Feature::CopyFrom(const Feature& from) { // @@protoc_insertion_point(class_specific_copy_from_start:diplomacy.tensorflow.Feature) if (&from == this) return; Clear(); MergeFrom(from); } bool Feature::IsInitialized() const { return true; } void Feature::Swap(Feature* other) { if (other == this) return; if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) { InternalSwap(other); } else { Feature* temp = New(GetArenaNoVirtual()); temp->MergeFrom(*other); other->CopyFrom(*this); InternalSwap(temp); if (GetArenaNoVirtual() == NULL) { delete temp; } } } void Feature::UnsafeArenaSwap(Feature* other) { if (other == this) return; GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual()); InternalSwap(other); } void Feature::InternalSwap(Feature* other) { using std::swap; swap(kind_, other->kind_); swap(_oneof_case_[0], other->_oneof_case_[0]); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata Feature::GetMetadata() const { protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages]; } // 
=================================================================== Features_FeatureEntry_DoNotUse::Features_FeatureEntry_DoNotUse() {} Features_FeatureEntry_DoNotUse::Features_FeatureEntry_DoNotUse(::google::protobuf::Arena* arena) : SuperType(arena) {} void Features_FeatureEntry_DoNotUse::MergeFrom(const Features_FeatureEntry_DoNotUse& other) { MergeFromInternal(other); } ::google::protobuf::Metadata Features_FeatureEntry_DoNotUse::GetMetadata() const { ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[4]; } void Features_FeatureEntry_DoNotUse::MergeFrom( const ::google::protobuf::Message& other) { ::google::protobuf::Message::MergeFrom(other); } // =================================================================== void Features::InitAsDefaultInstance() { } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int Features::kFeatureFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 Features::Features() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Features.base); SharedCtor(); // @@protoc_insertion_point(constructor:diplomacy.tensorflow.Features) } Features::Features(::google::protobuf::Arena* arena) : ::google::protobuf::Message(), _internal_metadata_(arena), feature_(arena) { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Features.base); SharedCtor(); RegisterArenaDtor(arena); // @@protoc_insertion_point(arena_constructor:diplomacy.tensorflow.Features) } Features::Features(const Features& from) : ::google::protobuf::Message(), _internal_metadata_(NULL) { _internal_metadata_.MergeFrom(from._internal_metadata_); feature_.MergeFrom(from.feature_); // @@protoc_insertion_point(copy_constructor:diplomacy.tensorflow.Features) } void Features::SharedCtor() { } Features::~Features() { // @@protoc_insertion_point(destructor:diplomacy.tensorflow.Features) SharedDtor(); } void Features::SharedDtor() { GOOGLE_DCHECK(GetArenaNoVirtual() == NULL); } void Features::ArenaDtor(void* object) { Features* _this = reinterpret_cast< Features* >(object); (void)_this; } void Features::RegisterArenaDtor(::google::protobuf::Arena* arena) { } void Features::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* Features::descriptor() { ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const Features& Features::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_Features.base); return *internal_default_instance(); } void Features::Clear() { // @@protoc_insertion_point(message_clear_start:diplomacy.tensorflow.Features) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; feature_.Clear(); _internal_metadata_.Clear(); } bool Features::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // 
@@protoc_insertion_point(parse_start:diplomacy.tensorflow.Features) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // map<string, .diplomacy.tensorflow.Feature> feature = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { Features_FeatureEntry_DoNotUse::Parser< ::google::protobuf::internal::MapField< Features_FeatureEntry_DoNotUse, ::std::string, ::diplomacy::tensorflow::Feature, ::google::protobuf::internal::WireFormatLite::TYPE_STRING, ::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE, 0 >, ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature > > parser(&feature_); DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( input, &parser)); DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( parser.key().data(), static_cast<int>(parser.key().length()), ::google::protobuf::internal::WireFormatLite::PARSE, "diplomacy.tensorflow.Features.FeatureEntry.key")); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:diplomacy.tensorflow.Features) return true; failure: // @@protoc_insertion_point(parse_failure:diplomacy.tensorflow.Features) return false; #undef DO_ } void Features::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:diplomacy.tensorflow.Features) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // map<string, .diplomacy.tensorflow.Feature> feature = 1; if (!this->feature().empty()) { typedef ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature >::const_pointer ConstPtr; typedef ConstPtr SortItem; typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less; struct Utf8Check { static void Check(ConstPtr p) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( p->first.data(), static_cast<int>(p->first.length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "diplomacy.tensorflow.Features.FeatureEntry.key"); } }; if (output->IsSerializationDeterministic() && this->feature().size() > 1) { ::std::unique_ptr<SortItem[]> items( new SortItem[this->feature().size()]); typedef ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature >::size_type size_type; size_type n = 0; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature >::const_iterator it = this->feature().begin(); it != this->feature().end(); ++it, ++n) { items[static_cast<ptrdiff_t>(n)] = SortItem(&*it); } ::std::sort(&items[0], &items[static_cast<ptrdiff_t>(n)], Less()); ::std::unique_ptr<Features_FeatureEntry_DoNotUse> entry; for (size_type i = 0; i < n; i++) { entry.reset(feature_.NewEntryWrapper( items[static_cast<ptrdiff_t>(i)]->first, items[static_cast<ptrdiff_t>(i)]->second)); ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 1, *entry, output); if (entry->GetArena() != NULL) { entry.release(); } Utf8Check::Check(items[static_cast<ptrdiff_t>(i)]); } } else { ::std::unique_ptr<Features_FeatureEntry_DoNotUse> entry; for (::google::protobuf::Map< ::std::string, 
::diplomacy::tensorflow::Feature >::const_iterator it = this->feature().begin(); it != this->feature().end(); ++it) { entry.reset(feature_.NewEntryWrapper( it->first, it->second)); ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 1, *entry, output); if (entry->GetArena() != NULL) { entry.release(); } Utf8Check::Check(&*it); } } } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:diplomacy.tensorflow.Features) } ::google::protobuf::uint8* Features::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:diplomacy.tensorflow.Features) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // map<string, .diplomacy.tensorflow.Feature> feature = 1; if (!this->feature().empty()) { typedef ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature >::const_pointer ConstPtr; typedef ConstPtr SortItem; typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less; struct Utf8Check { static void Check(ConstPtr p) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( p->first.data(), static_cast<int>(p->first.length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "diplomacy.tensorflow.Features.FeatureEntry.key"); } }; if (deterministic && this->feature().size() > 1) { ::std::unique_ptr<SortItem[]> items( new SortItem[this->feature().size()]); typedef ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature >::size_type size_type; size_type n = 0; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature >::const_iterator it = this->feature().begin(); it != this->feature().end(); ++it, ++n) { items[static_cast<ptrdiff_t>(n)] = SortItem(&*it); } ::std::sort(&items[0], &items[static_cast<ptrdiff_t>(n)], Less()); ::std::unique_ptr<Features_FeatureEntry_DoNotUse> entry; for (size_type i = 0; i < n; i++) { entry.reset(feature_.NewEntryWrapper( items[static_cast<ptrdiff_t>(i)]->first, items[static_cast<ptrdiff_t>(i)]->second)); target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageNoVirtualToArray( 1, *entry, deterministic, target); ; if (entry->GetArena() != NULL) { entry.release(); } Utf8Check::Check(items[static_cast<ptrdiff_t>(i)]); } } else { ::std::unique_ptr<Features_FeatureEntry_DoNotUse> entry; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature >::const_iterator it = this->feature().begin(); it != this->feature().end(); ++it) { entry.reset(feature_.NewEntryWrapper( it->first, it->second)); target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageNoVirtualToArray( 1, *entry, deterministic, target); ; if (entry->GetArena() != NULL) { entry.release(); } Utf8Check::Check(&*it); } } } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:diplomacy.tensorflow.Features) return target; } size_t Features::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:diplomacy.tensorflow.Features) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // map<string, .diplomacy.tensorflow.Feature> feature = 1; total_size += 1 * ::google::protobuf::internal::FromIntSize(this->feature_size()); { ::std::unique_ptr<Features_FeatureEntry_DoNotUse> entry; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::Feature >::const_iterator it = this->feature().begin(); it != this->feature().end(); ++it) { if (entry.get() != NULL && entry->GetArena() != NULL) { entry.release(); } entry.reset(feature_.NewEntryWrapper(it->first, it->second)); total_size += ::google::protobuf::internal::WireFormatLite:: MessageSizeNoVirtual(*entry); } if (entry.get() != NULL && entry->GetArena() != NULL) { entry.release(); } } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void Features::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:diplomacy.tensorflow.Features) GOOGLE_DCHECK_NE(&from, this); const Features* source = ::google::protobuf::internal::DynamicCastToGenerated<const Features>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:diplomacy.tensorflow.Features) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:diplomacy.tensorflow.Features) MergeFrom(*source); } } void Features::MergeFrom(const Features& from) { // @@protoc_insertion_point(class_specific_merge_from_start:diplomacy.tensorflow.Features) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; feature_.MergeFrom(from.feature_); } void Features::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:diplomacy.tensorflow.Features) if (&from == this) return; Clear(); MergeFrom(from); } void Features::CopyFrom(const Features& from) { // @@protoc_insertion_point(class_specific_copy_from_start:diplomacy.tensorflow.Features) if (&from == this) return; Clear(); MergeFrom(from); } bool Features::IsInitialized() const { return true; } void Features::Swap(Features* other) { if (other == this) return; if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) { InternalSwap(other); } else { Features* temp = New(GetArenaNoVirtual()); temp->MergeFrom(*other); other->CopyFrom(*this); InternalSwap(temp); if (GetArenaNoVirtual() == NULL) { delete temp; } } } void Features::UnsafeArenaSwap(Features* other) { if (other == this) return; GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual()); InternalSwap(other); } void Features::InternalSwap(Features* other) { using std::swap; feature_.Swap(&other->feature_); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata 
Features::GetMetadata() const { protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages]; } // =================================================================== void FeatureList::InitAsDefaultInstance() { } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int FeatureList::kFeatureFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 FeatureList::FeatureList() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FeatureList.base); SharedCtor(); // @@protoc_insertion_point(constructor:diplomacy.tensorflow.FeatureList) } FeatureList::FeatureList(::google::protobuf::Arena* arena) : ::google::protobuf::Message(), _internal_metadata_(arena), feature_(arena) { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FeatureList.base); SharedCtor(); RegisterArenaDtor(arena); // @@protoc_insertion_point(arena_constructor:diplomacy.tensorflow.FeatureList) } FeatureList::FeatureList(const FeatureList& from) : ::google::protobuf::Message(), _internal_metadata_(NULL), feature_(from.feature_) { _internal_metadata_.MergeFrom(from._internal_metadata_); // @@protoc_insertion_point(copy_constructor:diplomacy.tensorflow.FeatureList) } void FeatureList::SharedCtor() { } FeatureList::~FeatureList() { // @@protoc_insertion_point(destructor:diplomacy.tensorflow.FeatureList) SharedDtor(); } void FeatureList::SharedDtor() { GOOGLE_DCHECK(GetArenaNoVirtual() == NULL); } void FeatureList::ArenaDtor(void* object) { FeatureList* _this = reinterpret_cast< FeatureList* >(object); (void)_this; } void FeatureList::RegisterArenaDtor(::google::protobuf::Arena* arena) { } void FeatureList::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* FeatureList::descriptor() { ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const FeatureList& FeatureList::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FeatureList.base); return *internal_default_instance(); } void FeatureList::Clear() { // @@protoc_insertion_point(message_clear_start:diplomacy.tensorflow.FeatureList) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; feature_.Clear(); _internal_metadata_.Clear(); } bool FeatureList::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:diplomacy.tensorflow.FeatureList) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // repeated .diplomacy.tensorflow.Feature feature = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { 
DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, add_feature())); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:diplomacy.tensorflow.FeatureList) return true; failure: // @@protoc_insertion_point(parse_failure:diplomacy.tensorflow.FeatureList) return false; #undef DO_ } void FeatureList::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:diplomacy.tensorflow.FeatureList) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated .diplomacy.tensorflow.Feature feature = 1; for (unsigned int i = 0, n = static_cast<unsigned int>(this->feature_size()); i < n; i++) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 1, this->feature(static_cast<int>(i)), output); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:diplomacy.tensorflow.FeatureList) } ::google::protobuf::uint8* FeatureList::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:diplomacy.tensorflow.FeatureList) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated .diplomacy.tensorflow.Feature feature = 1; for (unsigned int i = 0, n = static_cast<unsigned int>(this->feature_size()); i < n; i++) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( 1, this->feature(static_cast<int>(i)), deterministic, target); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:diplomacy.tensorflow.FeatureList) return target; } size_t FeatureList::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:diplomacy.tensorflow.FeatureList) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // repeated .diplomacy.tensorflow.Feature feature = 1; { unsigned int count = static_cast<unsigned int>(this->feature_size()); total_size += 1UL * count; for (unsigned int i = 0; i < count; i++) { total_size += ::google::protobuf::internal::WireFormatLite::MessageSize( this->feature(static_cast<int>(i))); } } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void FeatureList::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:diplomacy.tensorflow.FeatureList) GOOGLE_DCHECK_NE(&from, this); const FeatureList* source = ::google::protobuf::internal::DynamicCastToGenerated<const FeatureList>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:diplomacy.tensorflow.FeatureList) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:diplomacy.tensorflow.FeatureList) MergeFrom(*source); } } void FeatureList::MergeFrom(const FeatureList& from) { // @@protoc_insertion_point(class_specific_merge_from_start:diplomacy.tensorflow.FeatureList) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; feature_.MergeFrom(from.feature_); } void FeatureList::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:diplomacy.tensorflow.FeatureList) if (&from == this) return; Clear(); MergeFrom(from); } void FeatureList::CopyFrom(const FeatureList& from) { // @@protoc_insertion_point(class_specific_copy_from_start:diplomacy.tensorflow.FeatureList) if (&from == this) return; Clear(); MergeFrom(from); } bool FeatureList::IsInitialized() const { return true; } void FeatureList::Swap(FeatureList* other) { if (other == this) return; if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) { InternalSwap(other); } else { FeatureList* temp = New(GetArenaNoVirtual()); temp->MergeFrom(*other); other->CopyFrom(*this); InternalSwap(temp); if (GetArenaNoVirtual() == NULL) { delete temp; } } } void FeatureList::UnsafeArenaSwap(FeatureList* other) { if (other == this) return; GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual()); InternalSwap(other); } void FeatureList::InternalSwap(FeatureList* other) { using std::swap; CastToBase(&feature_)->InternalSwap(CastToBase(&other->feature_)); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata FeatureList::GetMetadata() const { protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages]; } // =================================================================== FeatureLists_FeatureListEntry_DoNotUse::FeatureLists_FeatureListEntry_DoNotUse() {} FeatureLists_FeatureListEntry_DoNotUse::FeatureLists_FeatureListEntry_DoNotUse(::google::protobuf::Arena* arena) : SuperType(arena) {} void FeatureLists_FeatureListEntry_DoNotUse::MergeFrom(const FeatureLists_FeatureListEntry_DoNotUse& other) { MergeFromInternal(other); } ::google::protobuf::Metadata FeatureLists_FeatureListEntry_DoNotUse::GetMetadata() const { 
::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[7]; } void FeatureLists_FeatureListEntry_DoNotUse::MergeFrom( const ::google::protobuf::Message& other) { ::google::protobuf::Message::MergeFrom(other); } // =================================================================== void FeatureLists::InitAsDefaultInstance() { } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int FeatureLists::kFeatureListFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 FeatureLists::FeatureLists() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FeatureLists.base); SharedCtor(); // @@protoc_insertion_point(constructor:diplomacy.tensorflow.FeatureLists) } FeatureLists::FeatureLists(::google::protobuf::Arena* arena) : ::google::protobuf::Message(), _internal_metadata_(arena), feature_list_(arena) { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FeatureLists.base); SharedCtor(); RegisterArenaDtor(arena); // @@protoc_insertion_point(arena_constructor:diplomacy.tensorflow.FeatureLists) } FeatureLists::FeatureLists(const FeatureLists& from) : ::google::protobuf::Message(), _internal_metadata_(NULL) { _internal_metadata_.MergeFrom(from._internal_metadata_); feature_list_.MergeFrom(from.feature_list_); // @@protoc_insertion_point(copy_constructor:diplomacy.tensorflow.FeatureLists) } void FeatureLists::SharedCtor() { } FeatureLists::~FeatureLists() { // @@protoc_insertion_point(destructor:diplomacy.tensorflow.FeatureLists) SharedDtor(); } void FeatureLists::SharedDtor() { GOOGLE_DCHECK(GetArenaNoVirtual() == NULL); } void FeatureLists::ArenaDtor(void* object) { FeatureLists* _this = reinterpret_cast< FeatureLists* >(object); (void)_this; } void FeatureLists::RegisterArenaDtor(::google::protobuf::Arena* arena) { } void FeatureLists::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* FeatureLists::descriptor() { ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const FeatureLists& FeatureLists::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::scc_info_FeatureLists.base); return *internal_default_instance(); } void FeatureLists::Clear() { // @@protoc_insertion_point(message_clear_start:diplomacy.tensorflow.FeatureLists) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; feature_list_.Clear(); _internal_metadata_.Clear(); } bool FeatureLists::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:diplomacy.tensorflow.FeatureLists) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // map<string, .diplomacy.tensorflow.FeatureList> 
feature_list = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { FeatureLists_FeatureListEntry_DoNotUse::Parser< ::google::protobuf::internal::MapField< FeatureLists_FeatureListEntry_DoNotUse, ::std::string, ::diplomacy::tensorflow::FeatureList, ::google::protobuf::internal::WireFormatLite::TYPE_STRING, ::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE, 0 >, ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList > > parser(&feature_list_); DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( input, &parser)); DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( parser.key().data(), static_cast<int>(parser.key().length()), ::google::protobuf::internal::WireFormatLite::PARSE, "diplomacy.tensorflow.FeatureLists.FeatureListEntry.key")); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:diplomacy.tensorflow.FeatureLists) return true; failure: // @@protoc_insertion_point(parse_failure:diplomacy.tensorflow.FeatureLists) return false; #undef DO_ } void FeatureLists::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:diplomacy.tensorflow.FeatureLists) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // map<string, .diplomacy.tensorflow.FeatureList> feature_list = 1; if (!this->feature_list().empty()) { typedef ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::const_pointer ConstPtr; typedef ConstPtr SortItem; typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less; struct Utf8Check { static void Check(ConstPtr p) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( p->first.data(), static_cast<int>(p->first.length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "diplomacy.tensorflow.FeatureLists.FeatureListEntry.key"); } }; if (output->IsSerializationDeterministic() && this->feature_list().size() > 1) { ::std::unique_ptr<SortItem[]> items( new SortItem[this->feature_list().size()]); typedef ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::size_type size_type; size_type n = 0; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::const_iterator it = this->feature_list().begin(); it != this->feature_list().end(); ++it, ++n) { items[static_cast<ptrdiff_t>(n)] = SortItem(&*it); } ::std::sort(&items[0], &items[static_cast<ptrdiff_t>(n)], Less()); ::std::unique_ptr<FeatureLists_FeatureListEntry_DoNotUse> entry; for (size_type i = 0; i < n; i++) { entry.reset(feature_list_.NewEntryWrapper( items[static_cast<ptrdiff_t>(i)]->first, items[static_cast<ptrdiff_t>(i)]->second)); ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 1, *entry, output); if (entry->GetArena() != NULL) { entry.release(); } Utf8Check::Check(items[static_cast<ptrdiff_t>(i)]); } } else { ::std::unique_ptr<FeatureLists_FeatureListEntry_DoNotUse> entry; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::const_iterator it = this->feature_list().begin(); it != this->feature_list().end(); ++it) { entry.reset(feature_list_.NewEntryWrapper( it->first, it->second)); 
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 1, *entry, output); if (entry->GetArena() != NULL) { entry.release(); } Utf8Check::Check(&*it); } } } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:diplomacy.tensorflow.FeatureLists) } ::google::protobuf::uint8* FeatureLists::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:diplomacy.tensorflow.FeatureLists) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // map<string, .diplomacy.tensorflow.FeatureList> feature_list = 1; if (!this->feature_list().empty()) { typedef ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::const_pointer ConstPtr; typedef ConstPtr SortItem; typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less; struct Utf8Check { static void Check(ConstPtr p) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( p->first.data(), static_cast<int>(p->first.length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "diplomacy.tensorflow.FeatureLists.FeatureListEntry.key"); } }; if (deterministic && this->feature_list().size() > 1) { ::std::unique_ptr<SortItem[]> items( new SortItem[this->feature_list().size()]); typedef ::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::size_type size_type; size_type n = 0; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::const_iterator it = this->feature_list().begin(); it != this->feature_list().end(); ++it, ++n) { items[static_cast<ptrdiff_t>(n)] = SortItem(&*it); } ::std::sort(&items[0], &items[static_cast<ptrdiff_t>(n)], Less()); ::std::unique_ptr<FeatureLists_FeatureListEntry_DoNotUse> entry; for (size_type i = 0; i < n; i++) { entry.reset(feature_list_.NewEntryWrapper( items[static_cast<ptrdiff_t>(i)]->first, items[static_cast<ptrdiff_t>(i)]->second)); target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageNoVirtualToArray( 1, *entry, deterministic, target); ; if (entry->GetArena() != NULL) { entry.release(); } Utf8Check::Check(items[static_cast<ptrdiff_t>(i)]); } } else { ::std::unique_ptr<FeatureLists_FeatureListEntry_DoNotUse> entry; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::const_iterator it = this->feature_list().begin(); it != this->feature_list().end(); ++it) { entry.reset(feature_list_.NewEntryWrapper( it->first, it->second)); target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageNoVirtualToArray( 1, *entry, deterministic, target); ; if (entry->GetArena() != NULL) { entry.release(); } Utf8Check::Check(&*it); } } } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:diplomacy.tensorflow.FeatureLists) return target; } size_t FeatureLists::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:diplomacy.tensorflow.FeatureLists) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // map<string, .diplomacy.tensorflow.FeatureList> feature_list = 1; total_size += 1 * ::google::protobuf::internal::FromIntSize(this->feature_list_size()); { ::std::unique_ptr<FeatureLists_FeatureListEntry_DoNotUse> entry; for (::google::protobuf::Map< ::std::string, ::diplomacy::tensorflow::FeatureList >::const_iterator it = this->feature_list().begin(); it != this->feature_list().end(); ++it) { if (entry.get() != NULL && entry->GetArena() != NULL) { entry.release(); } entry.reset(feature_list_.NewEntryWrapper(it->first, it->second)); total_size += ::google::protobuf::internal::WireFormatLite:: MessageSizeNoVirtual(*entry); } if (entry.get() != NULL && entry->GetArena() != NULL) { entry.release(); } } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void FeatureLists::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:diplomacy.tensorflow.FeatureLists) GOOGLE_DCHECK_NE(&from, this); const FeatureLists* source = ::google::protobuf::internal::DynamicCastToGenerated<const FeatureLists>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:diplomacy.tensorflow.FeatureLists) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:diplomacy.tensorflow.FeatureLists) MergeFrom(*source); } } void FeatureLists::MergeFrom(const FeatureLists& from) { // @@protoc_insertion_point(class_specific_merge_from_start:diplomacy.tensorflow.FeatureLists) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; feature_list_.MergeFrom(from.feature_list_); } void FeatureLists::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:diplomacy.tensorflow.FeatureLists) if (&from == this) return; Clear(); MergeFrom(from); } void FeatureLists::CopyFrom(const FeatureLists& from) { // @@protoc_insertion_point(class_specific_copy_from_start:diplomacy.tensorflow.FeatureLists) if (&from == this) return; Clear(); MergeFrom(from); } bool FeatureLists::IsInitialized() const { return true; } void FeatureLists::Swap(FeatureLists* other) { if (other == this) return; if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) { InternalSwap(other); } else { FeatureLists* temp = New(GetArenaNoVirtual()); temp->MergeFrom(*other); other->CopyFrom(*this); InternalSwap(temp); if (GetArenaNoVirtual() == NULL) { delete temp; } } } void FeatureLists::UnsafeArenaSwap(FeatureLists* other) { if (other == this) return; GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual()); InternalSwap(other); } void FeatureLists::InternalSwap(FeatureLists* other) { 
using std::swap; feature_list_.Swap(&other->feature_list_); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata FeatureLists::GetMetadata() const { protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_diplomacy_5ftensorflow_2fcore_2fexample_2ffeature_2eproto::file_level_metadata[kIndexInFileMessages]; } // @@protoc_insertion_point(namespace_scope) } // namespace tensorflow } // namespace diplomacy namespace google { namespace protobuf { template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::BytesList* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::BytesList >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::BytesList >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::FloatList* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::FloatList >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::FloatList >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::Int64List* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::Int64List >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::Int64List >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::Feature* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::Feature >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::Feature >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::Features_FeatureEntry_DoNotUse >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::Features* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::Features >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::Features >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::FeatureList* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::FeatureList >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::FeatureList >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::FeatureLists_FeatureListEntry_DoNotUse >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::diplomacy::tensorflow::FeatureLists* Arena::CreateMaybeMessage< ::diplomacy::tensorflow::FeatureLists >(Arena* arena) { return Arena::CreateMessageInternal< ::diplomacy::tensorflow::FeatureLists >(arena); } } // namespace protobuf } // namespace google // @@protoc_insertion_point(global_scope)<|fim▁end|>
if (other == this) return; GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual()); InternalSwap(other);
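The generated C++ above implements the example-feature protos: Feature is a oneof over BytesList, FloatList and Int64List; Features is a map<string, Feature>; FeatureList is a repeated Feature; and FeatureLists is a map<string, FeatureList>, each with the usual protoc parse, serialize, merge and swap plumbing. The diplomacy.tensorflow package appears to be a vendored copy of TensorFlow's stock feature.proto, so the short Python sketch below uses the standard tensorflow package's tf.train messages to show what these shapes hold when populated; it is an illustration under that assumption, not part of the dataset row above.

# Illustration only: tf.train protos assumed equivalent to the
# diplomacy.tensorflow messages whose generated C++ appears above.
import tensorflow as tf

# Feature is a oneof over BytesList / FloatList / Int64List.
name = tf.train.Feature(bytes_list=tf.train.BytesList(value=[b"hello"]))
score = tf.train.Feature(float_list=tf.train.FloatList(value=[0.5, 1.5]))
count = tf.train.Feature(int64_list=tf.train.Int64List(value=[7]))

# Features is a map<string, Feature>; the generated code serializes it as
# repeated FeatureEntry messages with UTF-8-checked string keys.
features = tf.train.Features(feature={"name": name, "score": score, "count": count})

blob = features.SerializeToString()
assert tf.train.Features.FromString(blob).feature["count"].int64_list.value[0] == 7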
<|file_name|>collector_dmidecode.py<|end_file_name|><|fim▁begin|>import os import re from opsbro.collector import Collector # DMI have lot of useful information that detectors can use to know lot about the platform/hardware class Dmidecode(Collector): def launch(self): logger = self.logger logger.debug('getDmidecode: start') res = {} # Maybe we are in linux and we can directly read the linux_dmi_path = '/sys/class/dmi/id/' if os.path.exists(linux_dmi_path): file_names = os.listdir(linux_dmi_path) for fname in file_names: p = os.path.join(linux_dmi_path, fname) # There can be a link there, skip them if os.path.isfile(p): f = open(p, 'r') buf = f.read() f.close() res[fname] = buf.strip() logger.debug('getdmidecode: completed, returning') return res elif os.name == 'nt':<|fim▁hole|> else: # try dmidecode way, if exists res = self.execute_shell('LANG=C dmidecode -s') if res is False: self.set_not_eligible('Cannot read dmi information') return False for p in res.split('\n'): if re.search('^ ', p): buf = self.execute_shell('LANG=C dmidecode -s %s' % p).strip() if 'No such file or directory' in buf: logger.warning('Cannot access to dmi information with dmidecode command, exiting this collector.') self.set_not_eligible('Cannot get DMI informations because the dmidecode command is missing.') return res res[p.replace('-', '_').strip()] = buf logger.debug('getdmidecode: completed, returning') return res<|fim▁end|>
self.set_not_eligible('Windows is currently not managed for DMI information') return False # Ok not direct access, try to launch with
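The collector above prefers reading DMI keys straight from sysfs under /sys/class/dmi/id/ and only falls back to the dmidecode command when that path is absent. Below is a minimal standalone sketch of that sysfs fast path; it assumes plain Python on a Linux host and deliberately leaves out the opsbro Collector plumbing (set_not_eligible, execute_shell), so it is not a drop-in replacement for the class in the row above.

# Minimal sketch: read DMI identifiers from sysfs; no dmidecode fallback.
import os

DMI_PATH = '/sys/class/dmi/id/'

def read_dmi():
    info = {}
    if not os.path.isdir(DMI_PATH):
        return info  # non-Linux host or sysfs not mounted
    for name in os.listdir(DMI_PATH):
        path = os.path.join(DMI_PATH, name)
        if os.path.isfile(path):  # skips directories/symlinks such as power/
            try:
                with open(path) as f:
                    info[name] = f.read().strip()
            except (IOError, OSError):
                pass  # some keys (e.g. product_serial) are readable only by root
    return info

if __name__ == '__main__':
    for key, value in sorted(read_dmi().items()):
        print('%s=%s' % (key, value))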
<|file_name|>api_endpoints.go<|end_file_name|><|fim▁begin|>package model import ( "fmt" ) type APIEndpoints []APIEndpoint // DefaultAPIEndpointName is the default endpoint name used when you've omitted `apiEndpoints` but not `externalDNSName` const DefaultAPIEndpointName = "Default" // NewDefaultAPIEndpoints creates the slice of API endpoints containing only the default one which is with arbitrary DNS name and an ELB func NewDefaultAPIEndpoints(dnsName string, subnets []SubnetReference, hostedZoneId string, createRecordSet bool, recordSetTTL int, private bool) APIEndpoints { return []APIEndpoint{ APIEndpoint{ Name: DefaultAPIEndpointName, DNSName: dnsName, LoadBalancer: APIEndpointLB{ APIAccessAllowedSourceCIDRs: DefaultCIDRRanges(), SubnetReferences: subnets, HostedZone: HostedZone{ Identifier: Identifier{ ID: hostedZoneId, }, }, CreateRecordSet: &createRecordSet, RecordSetTTLSpecified: &recordSetTTL, PrivateSpecified: &private, }, }, } } // Validate returns an error if there's any user error in the settings of apiEndpoints func (e APIEndpoints) Validate() error { for i, apiEndpoint := range e { if err := apiEndpoint.Validate(); err != nil { return fmt.Errorf("invalid apiEndpoint \"%s\" at index %d: %v", apiEndpoint.Name, i, err) } } return nil } //type APIDNSRoundRobin struct { // // PrivateSpecified determines the resulting DNS round robin uses private IPs of the nodes for an endpoint // PrivateSpecified bool // // HostedZone is where the resulting A records are created for an endpoint // // Beware that kube-aws will never create a hosted zone used for a DNS round-robin because // // Doing so would result in CloudFormation to be unable to remove the hosted zone when the stack is deleted<|fim▁hole|><|fim▁end|>
// HostedZone HostedZone //}
<|file_name|>httpshandler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ import httplib import socket import urllib2 from lib.core.data import logger from lib.core.exception import SqlmapConnectionException ssl = None try: import ssl as _ssl ssl = _ssl except ImportError: pass _protocols = [ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1] class HTTPSConnection(httplib.HTTPSConnection): """ Connection class that enables usage of newer SSL protocols. Reference: http://bugs.python.org/msg128686 """ def __init__(self, *args, **kwargs): httplib.HTTPSConnection.__init__(self, *args, **kwargs) def connect(self): def create_sock(): sock = socket.create_connection((self.host, self.port), self.timeout) if getattr(self, "_tunnel_host", None): self.sock = sock self._tunnel() return sock success = False for protocol in _protocols: try: sock = create_sock() _ = ssl.wrap_socket(sock, self.key_file, self.cert_file, ssl_version=protocol) if _: success = True self.sock = _ _protocols.remove(protocol) _protocols.insert(0, protocol) break else:<|fim▁hole|> if not success: raise SqlmapConnectionException("can't establish SSL connection") class HTTPSHandler(urllib2.HTTPSHandler): def https_open(self, req): return self.do_open(HTTPSConnection if ssl else httplib.HTTPSConnection, req) # Bug fix (http://bugs.python.org/issue17849) def _(self, *args): return self._readline() httplib.LineAndFileWrapper._readline = httplib.LineAndFileWrapper.readline httplib.LineAndFileWrapper.readline = _<|fim▁end|>
sock.close() except ssl.SSLError, errMsg: logger.debug("SSL connection error occurred ('%s')" % errMsg)
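The classes above replace httplib.HTTPSConnection with a variant that retries the TLS handshake across several protocol versions, but urllib2 only uses a custom handler once it is registered on an opener. The sketch below shows that registration with the Python 2 urllib2 API; the wiring is illustrative (sqlmap's own opener setup is not part of the row above) and it assumes the HTTPSHandler class defined above is importable in the current scope.

# Illustration (Python 2): route https:// requests through the custom handler.
import urllib2

opener = urllib2.build_opener(HTTPSHandler)   # handler class from the snippet above
urllib2.install_opener(opener)

response = urllib2.urlopen('https://example.com/')
print response.getcode()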
<|file_name|>unused-move-capture.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // pretty-expanded FIXME #23616 #![feature(box_syntax)] pub fn main() { let _x: Box<_> = box 1; let lam_move = || {}; lam_move(); }<|fim▁end|>
<|file_name|>macros.rs<|end_file_name|><|fim▁begin|>#![macro_use] <|fim▁hole|> return Err(::std::convert::From::from($expr)); }; }<|fim▁end|>
macro_rules! fail { ($expr:expr) => {
<|file_name|>HorizontalDividerList.styles.js<|end_file_name|><|fim▁begin|>// @flow import { StyleSheet } from 'react-native'; import { colors } from '../../themes'; const styles = StyleSheet.create({<|fim▁hole|> marginHorizontal: 0, backgroundColor: colors.darkDivider, }, }); export default styles;<|fim▁end|>
divider: { height: 1,
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|> return Math.floor(Math.random() * 256); } /** * Creates a random image */ module.exports = function(width, height, callback) { var buffer = new Buffer(width * height * 3); for (var x = 0; x < width; x++) { for (var y = 0; y < height; y++) { var pixelStart = x * width * 3 + y * 3; buffer[pixelStart + 0] = randomColorComponent(); buffer[pixelStart + 1] = randomColorComponent(); buffer[pixelStart + 2] = randomColorComponent(); } } var image = new Jpeg(buffer, width, height); image.encode(function(result) { callback(null, result); }); };<|fim▁end|>
var Buffer = require('buffer').Buffer, Jpeg = require('jpeg').Jpeg; function randomColorComponent() {
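The Node snippet above fills a width * height * 3 RGB buffer with random bytes and hands it to the jpeg module for encoding. One quirk worth noting: its pixelStart arithmetic (x * width * 3 + y * 3) appears to tile the buffer correctly only when width equals height; the usual row-major form is (y * width + x) * 3. For comparison, here is a small Python sketch of the same idea using Pillow, which is an assumed dependency and is not referenced by the row above.

# Illustration: random RGB noise encoded as a JPEG via Pillow.
import os
from PIL import Image

def random_jpeg(width, height, path):
    raw = os.urandom(width * height * 3)  # 3 bytes per pixel: R, G, B
    Image.frombytes('RGB', (width, height), raw).save(path, format='JPEG')

random_jpeg(64, 64, 'noise.jpg')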
<|file_name|>lookup-get-cname-example-1.6.x.py<|end_file_name|><|fim▁begin|># Download the Python helper library from twilio.com/docs/python/install from twilio.rest import Client # Your Account Sid and Auth Token from twilio.com/user/account account_sid = "ACCOUNT_SID" auth_token = "your_auth_token" client = Client(account_sid, auth_token) number = client.lookups.phone_numbers("+16502530000").fetch(<|fim▁hole|>print(number.carrier['type']) print(number.carrier['name'])<|fim▁end|>
type="caller-name", )
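In the row above, the completion requests type="caller-name" (a CNAM lookup) while the suffix prints number.carrier['type'] and number.carrier['name'], fields that a carrier-type lookup populates; with only a caller-name request those carrier attributes may be empty. For reference, a sketch of the carrier variant against the same twilio-python Lookups client, with placeholder credentials and the phone number kept from the row above:

# Illustration: carrier lookup with the same Lookups client as above.
from twilio.rest import Client

client = Client("ACCOUNT_SID", "your_auth_token")
number = client.lookups.phone_numbers("+16502530000").fetch(type="carrier")
print(number.carrier['type'])   # e.g. 'mobile', 'landline', 'voip'
print(number.carrier['name'])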
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django.utils.timezone from django.conf import settings import django.core.validators class Migration(migrations.Migration): dependencies = [ ('auth', '0001_initial'), ] operations = [ migrations.CreateModel( name='ImagrUser', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('password', models.CharField(max_length=128, verbose_name='password')), ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')), ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), ('username', models.CharField(help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True, max_length=30, verbose_name='username', validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username.', 'invalid')])), ('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)), ('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)), ('email', models.EmailField(max_length=75, verbose_name='email address', blank=True)), ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), ('our_date_joined_field', models.DateField(auto_now_add=True)), ('our_is_active_field', models.BooleanField(default=False)), ('following', models.ManyToManyField(related_name='followers', to=settings.AUTH_USER_MODEL)), ('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of his/her group.', verbose_name='groups')), ('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')), ], options={ 'abstract': False, 'verbose_name': 'user', 'verbose_name_plural': 'users', }, bases=(models.Model,), ), migrations.CreateModel( name='Album', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('title', models.CharField(max_length=20)), ('description', models.CharField(max_length=140)), ('date_uploaded', models.DateField(auto_now_add=True)), ('date_modified', models.DateField(auto_now=True)), ('date_published', models.DateField()), ('published', models.CharField(default=b'private', max_length=7, choices=[(b'private', b'Private Photo'), (b'shared', b'Shared Photo'), (b'public', b'Public Photo')])), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='Photo', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('title', models.CharField(max_length=20)), ('description', models.CharField(max_length=140)), ('date_uploaded', models.DateField(auto_now_add=True)),<|fim▁hole|> ('date_published', models.DateField()), ('published', models.CharField(default=b'private', max_length=7, choices=[(b'private', b'Private Photo'), (b'shared', b'Shared Photo'), (b'public', b'Public Photo')])), ('image_url', models.CharField(default=b'Photo Not Found', max_length=1024)), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ], options={ }, bases=(models.Model,), ), migrations.AddField( model_name='album', name='cover', field=models.ForeignKey(related_name='Album_cover', to='imagr_app.Photo'), preserve_default=True, ), migrations.AddField( model_name='album', name='photos', field=models.ManyToManyField(related_name='Album_photos', to='imagr_app.Photo'), preserve_default=True, ), migrations.AddField( model_name='album', name='user', field=models.ForeignKey(to=settings.AUTH_USER_MODEL), preserve_default=True, ), ]<|fim▁end|>
('date_modified', models.DateField(auto_now=True)),