prompt: large_string, lengths 70 to 991k
completion: large_string, lengths 0 to 1.02k
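The two columns above describe a fill-in-the-middle (FIM) corpus: each row pairs a prompt (a source file whose masked span is marked with <|fim▁hole|>) with a completion holding the masked text. A minimal loading sketch follows; the dataset path and the use of the Hugging Face datasets library are assumptions for illustration, not part of this dump.

    from datasets import load_dataset  # assumption: Hugging Face `datasets` is installed

    # "example/fim-code-pairs" is a placeholder path, not a real dataset name.
    rows = load_dataset("example/fim-code-pairs", split="train")
    first = rows[0]
    print(first["prompt"][:80])   # file text containing the <|fim▁hole|> marker
    print(first["completion"])    # the text that belongs in the hole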
<|file_name|>Lesson27.java<|end_file_name|><|fim▁begin|>package demos.nehe.lesson27; import demos.common.GLDisplay; import demos.common.LessonNativeLoader; /** * @author Abdul Bezrati */ public class Lesson27 extends LessonNativeLoader { public static void main(String[] args) { GLDisplay neheGLDisplay = GLDisplay .createGLDisplay("Lesson 27: Shadows"); Renderer renderer = new Renderer(); <|fim▁hole|> neheGLDisplay.start(); } }<|fim▁end|>
InputHandler inputHandler = new InputHandler(renderer, neheGLDisplay); neheGLDisplay.addGLEventListener(renderer); neheGLDisplay.addKeyListener(inputHandler);
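The completion line above is exactly the text that was cut out at <|fim▁hole|>; splicing it back reproduces the original Lesson27.java. A sketch of that reconstruction, assuming the DeepSeek-style marker strings seen throughout this dump:

    # Marker strings are copied from the rows above; treat them as assumptions
    # about this dump's conventions, not a published API.
    BEGIN, HOLE, END = "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>"

    def splice_fim(prompt: str, completion: str) -> str:
        body = prompt.split(BEGIN, 1)[1].rsplit(END, 1)[0]  # drop file-name prefix and outer markers
        prefix, suffix = body.split(HOLE, 1)                # cut at the hole
        return prefix + completion + suffix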
<|file_name|>ApplicationFeature.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.isis.core.metamodel.services.appfeat; import java.util.SortedSet; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import org.apache.isis.applib.IsisApplibModule; import org.apache.isis.applib.annotation.Programmatic; import org.apache.isis.applib.annotation.SemanticsOf; import org.apache.isis.applib.services.appfeat.ApplicationFeatureRepository; import org.apache.isis.applib.services.appfeat.ApplicationMemberType; import org.apache.isis.applib.util.ObjectContracts; /** * Canonical application feature, identified by {@link ApplicationFeatureId}, * and wired together with other application features and cached by {@link ApplicationFeatureRepository}. * * <p> * Note that this is NOT a view model; instead it can be converted to a string using methods of * {@link ApplicationFeatureRepository}, eg {@link ApplicationFeatureRepository#classNamesContainedIn(String, ApplicationMemberType)}. * </p> */ public class ApplicationFeature implements Comparable<ApplicationFeature> { public static abstract class PropertyDomainEvent<T> extends IsisApplibModule.PropertyDomainEvent<ApplicationFeature, T> {} public static abstract class CollectionDomainEvent<T> extends IsisApplibModule.CollectionDomainEvent<ApplicationFeature, T> {} public static abstract class ActionDomainEvent extends IsisApplibModule.ActionDomainEvent<ApplicationFeature> {} //region > constants // using same value for all to neaten up rendering public static final int TYPICAL_LENGTH_PKG_FQN = 50; public static final int TYPICAL_LENGTH_CLS_NAME = 50; public static final int TYPICAL_LENGTH_MEMBER_NAME = 50; //endregion //region > constructors public ApplicationFeature() { this(null); } public ApplicationFeature(final ApplicationFeatureId featureId) { setFeatureId(featureId); } //endregion //region > featureId private ApplicationFeatureId featureId; @Programmatic public ApplicationFeatureId getFeatureId() { return featureId; } public void setFeatureId(final ApplicationFeatureId applicationFeatureId) { this.featureId = applicationFeatureId; } //endregion //region > memberType private ApplicationMemberType memberType; /** * Only for {@link ApplicationFeatureType#MEMBER member}s. */ @Programmatic public ApplicationMemberType getMemberType() { return memberType; } public void setMemberType(final ApplicationMemberType memberType) { this.memberType = memberType; } //endregion //region > returnTypeName (for: properties, collections, actions) private String returnTypeName; /** * Only for {@link ApplicationMemberType#ACTION action}s. 
*/ @Programmatic public String getReturnTypeName() { return returnTypeName; } public void setReturnTypeName(final String returnTypeName) { this.returnTypeName = returnTypeName; } //endregion //region > contributed (for: properties, collections, actions) private boolean contributed; @Programmatic<|fim▁hole|> return contributed; } public void setContributed(final boolean contributed) { this.contributed = contributed; } //endregion //region > derived (properties and collections) private Boolean derived; /** * Only for {@link ApplicationMemberType#PROPERTY} and {@link ApplicationMemberType#COLLECTION} */ @Programmatic public Boolean isDerived() { return derived; } public void setDerived(final Boolean derived) { this.derived = derived; } //endregion //region > propertyMaxLength (properties only) private Integer propertyMaxLength; /** * Only for {@link ApplicationMemberType#ACTION action}s. */ @Programmatic public Integer getPropertyMaxLength() { return propertyMaxLength; } public void setPropertyMaxLength(final Integer propertyMaxLength) { this.propertyMaxLength = propertyMaxLength; } //endregion //region > propertyTypicalLength (properties only) private Integer propertyTypicalLength; /** * Only for {@link ApplicationMemberType#ACTION action}s. */ @Programmatic public Integer getPropertyTypicalLength() { return propertyTypicalLength; } public void setPropertyTypicalLength(final Integer propertyTypicalLength) { this.propertyTypicalLength = propertyTypicalLength; } //endregion //region > actionSemantics (actions only) private SemanticsOf actionSemantics; /** * Only for {@link ApplicationMemberType#ACTION action}s. */ @Programmatic public SemanticsOf getActionSemantics() { return actionSemantics; } public void setActionSemantics(final SemanticsOf actionSemantics) { this.actionSemantics = actionSemantics; } //endregion //region > packages: Contents private final SortedSet<ApplicationFeatureId> contents = Sets.newTreeSet(); @Programmatic public SortedSet<ApplicationFeatureId> getContents() { ApplicationFeatureType.ensurePackage(this.getFeatureId()); return contents; } @Programmatic public void addToContents(final ApplicationFeatureId contentId) { ApplicationFeatureType.ensurePackage(this.getFeatureId()); ApplicationFeatureType.ensurePackageOrClass(contentId); this.contents.add(contentId); } //endregion //region > classes: Properties, Collections, Actions private final SortedSet<ApplicationFeatureId> properties = Sets.newTreeSet(); @Programmatic public SortedSet<ApplicationFeatureId> getProperties() { ApplicationFeatureType.ensureClass(this.getFeatureId()); return properties; } private final SortedSet<ApplicationFeatureId> collections = Sets.newTreeSet(); @Programmatic public SortedSet<ApplicationFeatureId> getCollections() { ApplicationFeatureType.ensureClass(this.getFeatureId()); return collections; } private final SortedSet<ApplicationFeatureId> actions = Sets.newTreeSet(); @Programmatic public SortedSet<ApplicationFeatureId> getActions() { ApplicationFeatureType.ensureClass(this.getFeatureId()); return actions; } @Programmatic public void addToMembers(final ApplicationFeatureId memberId, final ApplicationMemberType memberType) { ApplicationFeatureType.ensureClass(this.getFeatureId()); ApplicationFeatureType.ensureMember(memberId); membersOf(memberType).add(memberId); } @Programmatic public SortedSet<ApplicationFeatureId> membersOf(final ApplicationMemberType memberType) { ApplicationFeatureType.ensureClass(this.getFeatureId()); switch (memberType) { case PROPERTY: return properties; case 
COLLECTION: return collections; default: // case ACTION: return actions; } } //endregion //region > Functions public static class Functions { private Functions(){} public static final Function<? super ApplicationFeature, ? extends String> GET_FQN = new Function<ApplicationFeature, String>() { @Override public String apply(final ApplicationFeature input) { return input.getFeatureId().getFullyQualifiedName(); } }; public static final Function<ApplicationFeature, ApplicationFeatureId> GET_ID = new Function<ApplicationFeature, ApplicationFeatureId>() { @Override public ApplicationFeatureId apply(final ApplicationFeature input) { return input.getFeatureId(); } }; } public static class Predicates { private Predicates(){} public static Predicate<ApplicationFeature> packageContainingClasses( final ApplicationMemberType memberType, final ApplicationFeatureRepositoryDefault applicationFeatures) { return new Predicate<ApplicationFeature>() { @Override public boolean apply(final ApplicationFeature input) { // all the classes in this package final Iterable<ApplicationFeatureId> classIds = Iterables.filter(input.getContents(), ApplicationFeatureId.Predicates.isClassContaining(memberType, applicationFeatures)); return classIds.iterator().hasNext(); } }; } } //endregion //region > equals, hashCode, compareTo, toString private final static String propertyNames = "featureId"; @Override public int compareTo(final ApplicationFeature other) { return ObjectContracts.compare(this, other, propertyNames); } @Override public boolean equals(final Object obj) { return ObjectContracts.equals(this, obj, propertyNames); } @Override public int hashCode() { return ObjectContracts.hashCode(this, propertyNames); } @Override public String toString() { return ObjectContracts.toString(this, propertyNames); } //endregion }<|fim▁end|>
public boolean isContributed() {
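Rows like this ApplicationFeature example are usually produced by the inverse operation: choose a span of the file, move it into the completion column, and mark the gap. A hedged sketch of that construction step — real pipelines pick syntax-aware spans, so the uniform random choice here is only illustrative:

    import random

    def make_fim_row(source: str, min_len: int = 10, max_len: int = 120) -> dict:
        # Pick a random contiguous span of the file to mask out.
        start = random.randrange(0, max(1, len(source) - min_len))
        end = min(len(source), start + random.randint(min_len, max_len))
        prompt = ("<|fim▁begin|>" + source[:start] +
                  "<|fim▁hole|>" + source[end:] + "<|fim▁end|>")
        return {"prompt": prompt, "completion": source[start:end]}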
<|file_name|>nameserver.go<|end_file_name|><|fim▁begin|>/** Copyright (C) 2012 Roberto Costumero Moreno <[email protected]> This file is part of Cosmofs. Cosmofs is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Cosmofs is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|>You should have received a copy of the GNU General Public License along with Cosmofs. If not, see <http://www.gnu.org/licenses/>. **/ package cosmofs import ( "encoding/gob" "log" "os" "path/filepath" "regexp" "strings" ) type FileList []*File type DirTable map[string]FileList type IDTable map[string]DirTable var ( Table IDTable = make(IDTable) myID string ) // TODO: Multiple different kinds of errors type NameServerError struct { e error } func (e *NameServerError) Error() string { return "Error in the NameServer" } func init() { // Check if COSMOFSIN environment is set if *Cosmofsin == "" { log.Fatalf("COSMOFSIN not set correctly. Current content <%s>", *Cosmofsin) } // Check if COSMOFSIN is a correct directory if _, err := os.Lstat(*Cosmofsin); err != nil { log.Fatalf("COSMOFSIN not set correctly. Current content <%s>", *Cosmofsin) } sharedDirList := filepath.SplitList(*Cosmofsout) // There shall be at least one shared directory if len(sharedDirList) == 0 { log.Fatal("COSMOFSOUT should have at least one directory or file.") } // TODO: Fix this // HACK to get here myID correctly buffer := parseKeyFile(*pubkeyFileName) _, _, id, ok := parsePubKey(buffer) if !ok { log.Fatal("Cannot parse Public Key File") } myID = string(id) // Create a new user in the table // TODO: Decode and create correct ID err := Table.AddID(myID) if err != nil { log.Fatal("Could not create new ID") } // Shared directories are initialized for _, dir := range sharedDirList { dir = filepath.Clean(dir) // Check wether we can read the current directory fi, err := os.Lstat(dir); if err != nil { continue } // If it is a directory, look for the config file and decode it, or // generate it if it does not already exists. if fi.IsDir() { configFileName := filepath.Join(dir, COSMOFSCONFIGFILE) if *resetConfig { _, err := os.Lstat(configFileName) if err == nil { err := os.Remove(configFileName) if err != nil { log.Fatal("Error re-generating config files.") } } } _, err := os.Lstat(configFileName) if err != nil { err := createConfigFile(dir, configFileName) if err != nil { log.Printf("Error creating config file: %s", err) continue } } // Decode the config file and update data structures. err = decodeConfigFile(configFileName) if err != nil { log.Printf("Error decoding config file: %s", err) continue } } } } func (t IDTable) AddID (id string) (err error) { err = checkID(id) if err != nil { return err } if _, ok := t[id]; !ok { t[id] = make(DirTable) } return err } func (t IDTable) AddDir (id, dir, baseDir string, recursive bool) (err error) { // Check for existing dir err = t.ExistsDir(id,baseDir) if err == nil { log.Printf("Error: Dir %s already exists in the table", baseDir) return err } // Read the directory and include the files on it. 
fi, err := os.Lstat(dir) if err != nil { log.Printf("Error reading dir: %s - %s", dir, err) return err } if fi.IsDir() { file, err := os.Open(dir) if err != nil { log.Printf("Error reading dir: %s - %s", dir, err) return err } fi, err := file.Readdir(0) if err != nil { log.Printf("Error reading dir contents: %s - %s", dir, err) return err } files := make(FileList, 0) //globalBaseDir := filepath.Join(id, baseDir) for _, ent := range fi { if strings.HasPrefix(ent.Name(), ".") { continue } files = append(files, &File{ LocalPath: filepath.Clean(dir), GlobalPath: filepath.Join(id,baseDir,ent.Name()), Filename: ent.Name(), Size: ent.Size(), IsDir: ent.IsDir(), Owner: MyPublicPeer, KeepCopy: true, Online: false, NumChunks: 1, Chunks: nil, }) if recursive && ent.IsDir() { t.AddDir(id, filepath.Join(dir, ent.Name()), filepath.Join(baseDir, ent.Name()), recursive) } } t.AddID(id) t[id][baseDir] = files return err } return &NameServerError{} } func (t IDTable) ListIDs() (ids []string, err error) { if len(t) > 0 { for k := range t { ids = append(ids, k) } return ids, err } return nil, &NameServerError{} } func (t IDTable) ListAllDirs() (dirs []string, err error) { for id, v := range t { for k := range v { dirs = append(dirs, filepath.Join(id, k)) } } return dirs, err } func (t IDTable) ListDirs(id string) (dirs []string, err error) { if v, ok := t[id]; ok { for k := range v { dirs = append(dirs, filepath.Join(id, k)) } return dirs, err } return nil, &NameServerError{} } func (t IDTable) ListDir (id, dir string) (content []string, err error) { if _, ok := t[id]; ok { if _, ok := t[id][dir]; ok { for _, file := range t[id][dir] { content = append(content, filepath.Join(id, dir, file.Filename)) } return content, err } } return content, &NameServerError{} } func (t IDTable) ExistsID (id string) (i string, err error) { if _, ok := t[id]; ok { return id, err } return "", &NameServerError{} } func (t IDTable) ExistsDir (id, dir string) (err error) { _, err = t.ExistsID(id) if err != nil { return &NameServerError{} } if _, ok := t[id][dir]; !ok { return &NameServerError{} } return err } func (t IDTable) SearchDir (dir string) (result []string, err error) { if len(t) > 0 { found := false for k, v := range t { for d := range v { if strings.Contains(d, dir) { result = append(result, filepath.Join(k,d)) found = true } } } if found { return result, err } } return result, &NameServerError{} } func (t IDTable) SearchFile (name string) (result []string, err error) { if len(t) > 0 { found := false for k, v := range t { for d, files := range v { for _, file := range files { if strings.Contains(file.Filename, name) && !file.IsDir { result = append(result, filepath.Join(k,d,file.Filename)) found = true } } } } if found { return result, err } } return result, &NameServerError{} } func (t IDTable) Search (s string) (result []string, err error) { res1, err := t.SearchDir(s) if err != nil { return result, err } res2, err := t.SearchFile(s) if err != nil { return result, err } result = make([]string, len(res1) + len(res2)) copy(result, res1) copy(result[len(res1):], res2) return result, err } func (t IDTable) DeleteID (id string) { delete (t, id) } func (t IDTable) DeleteDir (id, dir string) { if _, ok := t[id][dir]; ok { delete(t[id], dir) if len(t[id]) == 0 { t.DeleteID(id) } } } func (t IDTable) ReceiveAndMergeTable (decod *gob.Decoder) { var recvTable IDTable = make(IDTable) err := decod.Decode(&recvTable) if err != nil { log.Fatal("Error decoding table: ", err) } log.Printf("LOCAL TABLE: %v\n", Table) log.Printf("REMOTE 
TABLE: %v\n", recvTable) for k, v := range recvTable { for d, files := range v { if _, ok := t[k][d]; !ok { t.AddID(k) t[k][d] = files log.Printf("Added dir %v from %v\n", d, k) } } } encodeConfigFiles() } func checkID (id string) (err error) { mailregexp, err := regexp.Compile(`^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}$`) if err != nil { return err } if mailregexp.MatchString(id) { return err } return &NameServerError{} } func SplitPath (path string) (id, dir string, err error) { res := strings.SplitN(path, "/", 2) if len(res) != 2 { return id, dir, &NameServerError{} } if err := checkID(res[0]); err != nil { return id, dir, &NameServerError{} } return res[0], filepath.Clean(res[1]), err } func createConfigFile(dir, configFileName string) (err error) { // Create the config file. configFile, err := os.Create(configFileName) if err != nil { return err } // Add directory and subdirectories // TODO: Should it be recursive? err = Table.AddDir(myID, dir, filepath.Base(dir), true) if err != nil { log.Printf("Error adding new directory: %s", err) return err } configEnc := gob.NewEncoder(configFile) err = configEnc.Encode(Table) if err != nil { log.Fatal("Error encoding table in config file: ", err) } return err } func decodeConfigFile(configFileName string) (err error){ configFile, err := os.Open(configFileName) if err != nil { log.Printf("Error opening config file: %s", err) return err } configDec := gob.NewDecoder(configFile) err = configDec.Decode(&Table) if err != nil { log.Fatal("Error decoding list of files config file: ", err) } return err } func encodeConfigFiles() (err error){ sharedDirList := filepath.SplitList(*Cosmofsout) // Shared directories are initialized for _, dir := range sharedDirList { dir = filepath.Clean(dir) // Check wether we can read the current directory fi, err := os.Lstat(dir); if err != nil { continue } // If it is a directory, look for the config file and decode it, or // generate it if it does not already exists. if fi.IsDir() { configFileName := filepath.Join(dir, COSMOFSCONFIGFILE) _, err := os.Lstat(configFileName) if err == nil { err := os.Remove(configFileName) if err != nil { log.Fatal("Error re-generating config files.") } } err = createConfigFile(dir, configFileName) if err != nil { log.Printf("Error creating config file: %s", err) continue } // Decode the config file and update data structures. err = decodeConfigFile(configFileName) if err != nil { log.Printf("Error decoding config file: %s", err) continue } } } return err } func PrintTable() { for k, v := range Table { log.Printf("- %v\n", k) for kk, vv := range v { log.Printf("-- %v\n", kk) for _, vvv := range vv { log.Printf("--- %v : %v : %v\n", vvv.Filename, vvv.GlobalPath, vvv.LocalPath) } } } }<|fim▁end|>
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
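checkID in the Go row above accepts only peer IDs that look like e-mail addresses. The same check in Python, for comparison; the pattern is copied from the source, and its {2,4} TLD quantifier is a simplification that rejects longer modern TLDs:

    import re

    # Pattern copied from nameserver.go's checkID.
    EMAIL_RE = re.compile(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,4}$")

    def check_id(peer_id: str) -> bool:
        return EMAIL_RE.match(peer_id) is not None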
<|file_name|>logrok.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """Query and aggregate data from log files using SQL-like syntax""" import sys import argparse import os import re import ast import readline import atexit import time import inspect from multiprocessing import cpu_count try: from collections import OrderedDict except ImportError: # python < 2.7 compatability from compat.OrderedDict import OrderedDict from ply import yacc import parser import parallel import screen import sqlfuncs import logformat from util import NoTokenError, parse_format_string, Complete, Table, pretty_print DEBUG = False log_regex = None class LogQuery(object): def __init__(self, data, query): self.data = data self.query = query try: self.ast = parser.parse(query) except NoTokenError, e: print "ERROR: %s" % e.message print query return except SyntaxError: return if DEBUG: # pretty-printer sq = str(self.ast) pretty_print(sq) print sq print '-'*screen.width pass def run(self): start_time = time.time() op_data = sqlfuncs.do(self.ast, self.data[:]) # COPY!!! response = OrderedDict() for row in op_data: for key in row.keys(): if not response.has_key(key): response[key] = [] response[key].append(row[key]) Table(response, start_time).prnt() class LoGrok(object): def __init__(self, args, interactive=False, curses=False, chunksize=10000): if curses: screen.init_curses() elif interactive: screen.init_linebased() self.interactive = interactive self.args = args self.processed_rows = 0 self.oldpct = 0 self.data = [] self.chunksize = chunksize self.complete = Complete() self.crunchlogs() self.interact() def crunchlogs(self): global log_regex if self.args.format is not None: logformat = self.args.format else: logformat = logformat.TYPES[self.args.type] print lines = [] for logfile in self.args.logfile: screen.print_mutable("Reading lines from %s:" % logfile.name) lines += logfile.readlines()<|fim▁hole|> screen.print_mutable("Reading lines from %s: %d" % (logfile.name, len(lines))) logfile.close() screen.print_mutable("", True) log_regex = re.compile(parse_format_string(logformat)) if self.args.lines: lines = lines[:self.args.lines] st = time.time() self.data = parallel.run(log_match, lines, _print=True) et = time.time() print "%d lines crunched in %0.3f seconds" % (len(lines), (et-st)) def interact(self): if screen.is_curses(): screen.draw_curses_screen(self.data) self.main_loop() elif self.interactive: self.shell() else: self.query(self.args.query) def shell(self): try: history = os.path.expanduser('~/.logrok_history') readline.read_history_file(history) except IOError: pass atexit.register(readline.write_history_file, history) readline.set_history_length(1000) readline.parse_and_bind('tab: complete') readline.set_completer(self.complete.complete) # XXX This is ugly and needs to be more intelligent. 
Ideally, the # completer would use readline.readline() to contextually switch out # the returned matches self.complete.addopts(['select', 'from log', 'where', 'between', 'order by', 'group by', 'limit', ] + get_sqlfuncs() + self.data[0].keys()) while True: q = raw_input("logrok> ").strip() while not q.endswith(";"): q += raw_input("> ").strip() self.query(q) def query(self, query): semicolon = query.find(';') if semicolon != -1: query = query[:semicolon] if query in ('quit', 'bye', 'exit'): sys.exit(0) if query.startswith('help') or query.startswith('?'): answer = "Use sql syntax against your log, `from` clauses are ignored.\n"\ "Queries can span multiple lines and _must_ end in a semicolon `;`.\n"\ " Try: `show fields;` to see available field names. Press TAB at the\n"\ " beginning of a new line to see all available completions." print answer return if query in ('show fields', 'show headers'): print ', '.join(self.data[0].keys()) return else: try: q = LogQuery(self.data, query) return q.run() except SyntaxError, e: return e.message def main_loop(self): while 1: c = screen.getch() if c == ord('x'): break if c == ord('q'): screen.prompt("QUERY:", self.query) def get_sqlfuncs(): return map( lambda x: x[0], filter( lambda x: not x[0].startswith('_') and not x[0] == 'do', inspect.getmembers(sqlfuncs, inspect.isfunction) ) ) @parallel.map def log_match(chunk): response = [] for line in chunk: out = {} m = log_regex.match(line) for key in log_regex.groupindex: if logformat.types.has_key(key): f = logformat.types[key] else: f = str """ # XXX # This is a hack a big big hack # It's here because I discovered that converting the date # strings into date objects using strptime is a HUGE performance hit! # -- don't know what to do about that if f not in (int, str): f = str """ d = m.group(key) out[key] = f(d) response.append(out) return response def main(): cmd = argparse.ArgumentParser(description="Grok/Query/Aggregate log files. 
Requires python2 >= 2.7") typ = cmd.add_mutually_exclusive_group(required=True) typ.add_argument('-t', '--type', metavar='TYPE', choices=logformat.TYPES, help='{%s} Use built-in log type (default: apache-common)'%', '.join(logformat.TYPES), default='apache-common') typ.add_argument('-f', '--format', action='store', help='Log format (use apache LogFormat string)') typ.add_argument('-C', '--config', type=argparse.FileType('r'), help='httpd.conf file in which to find LogFormat string (requires -T)') cmd.add_argument('-T', '--ctype', help='type-name for LogFormat from specified httpd.conf file (only works with -c)') cmd.add_argument('-j', '--processes', action='store', type=int, help='Number of processes to fork for log crunching (default: smart)', default=parallel.SMART) cmd.add_argument('-l', '--lines', action='store', type=int, help='Only process LINES lines of input') interactive = cmd.add_mutually_exclusive_group(required=False) interactive.add_argument('-i', '--interactive', action='store_true', help="Use line-based interactive interface") interactive.add_argument('-c', '--curses', action='store_true', help=argparse.SUPPRESS) interactive.add_argument('-q', '--query', help="The query to run") cmd.add_argument('-d', '--debug', action='store_true', help="Turn debugging on (you don't want this)") cmd.add_argument('logfile', nargs='+', type=argparse.FileType('r'), help="log(s) to parse/query") args = cmd.parse_args(sys.argv[1:]) if args.config and not args.ctype: cmd.error("-C/--config option requires -T/--ctype option") if args.ctype and not args.config: cmd.error("-T/--ctype only works with -C/--config option") if args.config and args.ctype: config = args.config.read() args.config.close() m = re.search(r'^logformat[\s]+(.*)[\s]+%s' % args.ctype, config, re.I|re.M) if m is None: cmd.error("LogFormat %s not found in %s" % (args.ctype, args.config.name)) format = m.group(1) if (format.startswith("'") or format.startswith('"')) and (format.endswith("'") or format.endswith('"')): format = format[1:-1] args.format = format.replace(r"\'", "'").replace(r'\"', '"') global DEBUG DEBUG = args.debug parser.DEBUG = DEBUG parallel.DEBUG = DEBUG sqlfuncs.DEBUG = DEBUG parser.init() parallel.numprocs = args.processes LoGrok(args, interactive=args.interactive, curses=args.curses) if __name__ == '__main__': try: main() except KeyboardInterrupt: parallel.killall() # TODO -- reset terminal if curses print sys.exit(1)<|fim▁end|>
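logrok.py's core trick is fanning chunks of log lines out to worker processes, each matching a compiled LogFormat regex. A self-contained sketch of that chunked parallel matching, with a deliberately simplified pattern standing in for the regex that parse_format_string() would build:

    import re
    from multiprocessing import Pool

    # Simplified stand-in for an Apache common-log pattern; illustrative only.
    LOG_RE = re.compile(r'(?P<host>\S+) \S+ \S+ \[(?P<time>[^\]]+)\] "(?P<request>[^"]*)"')

    def match_chunk(lines):
        # Worker: return one dict of named groups per matching line.
        return [m.groupdict() for m in map(LOG_RE.match, lines) if m]

    def parse_parallel(lines, procs=4, chunksize=10000):
        chunks = [lines[i:i + chunksize] for i in range(0, len(lines), chunksize)]
        with Pool(procs) as pool:
            matched = pool.map(match_chunk, chunks)
        return [row for part in matched for row in part]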
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import numpy as np import os if os.path.exists('MANIFEST'): os.remove('MANIFEST') include_dirs = [np.get_include()] setup(name="pystruct", version="0.3.2", install_requires=["ad3", "numpy"], packages=['pystruct', 'pystruct.learners', 'pystruct.inference', 'pystruct.models', 'pystruct.utils', 'pystruct.datasets', 'pystruct.tests', 'pystruct.tests.test_learners', 'pystruct.tests.test_models', 'pystruct.tests.test_inference', 'pystruct.tests.test_utils'], include_package_data=True, description="Structured Learning and Prediction in Python", author="Andreas Mueller", author_email="[email protected]", url="http://pystruct.github.io", license="BSD 2-clause", use_2to3=True, ext_modules=[Extension("pystruct.models.utils", ["src/utils.c"], include_dirs=include_dirs), Extension("pystruct.inference._viterbi", ["pystruct/inference/_viterbi.c"], include_dirs=include_dirs)], classifiers=['Intended Audience :: Science/Research', 'Intended Audience :: Developers', 'License :: OSI Approved', 'Programming Language :: Python', 'Topic :: Scientific/Engineering', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: Unix', 'Operating System :: MacOS', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', ], )<|fim▁end|>
from setuptools import setup from setuptools.extension import Extension
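A usage check reusing splice_fim from earlier: the hole in this setup.py covered only the import block, and the dump has apparently flattened the completion's newline, so restoring it yields a buildable script. prompt_text here is a hypothetical variable holding this row's prompt cell:

    completion = ("from setuptools import setup\n"
                  "from setuptools.extension import Extension")
    restored = splice_fim(prompt_text, completion)
    assert "np.get_include()" in restored  # the numpy include dir logic survives the splice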
<|file_name|>package.py<|end_file_name|><|fim▁begin|>############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * from glob import glob class Cuda(Package):<|fim▁hole|> Note: This package does not currently install the drivers necessary to run CUDA. These will need to be installed manually. See: https://docs.nvidia.com/cuda/ for details.""" homepage = "https://developer.nvidia.com/cuda-zone" version('9.2.88', 'dd6e33e10d32a29914b7700c7b3d1ca0', expand=False, url="https://developer.nvidia.com/compute/cuda/9.2/Prod/local_installers/cuda_9.2.88_396.26_linux") version('9.1.85', '67a5c3933109507df6b68f80650b4b4a', expand=False, url="https://developer.nvidia.com/compute/cuda/9.1/Prod/local_installers/cuda_9.1.85_387.26_linux") version('9.0.176', '7a00187b2ce5c5e350e68882f42dd507', expand=False, url="https://developer.nvidia.com/compute/cuda/9.0/Prod/local_installers/cuda_9.0.176_384.81_linux-run") version('8.0.61', '33e1bd980e91af4e55f3ef835c103f9b', expand=False, url="https://developer.nvidia.com/compute/cuda/8.0/Prod2/local_installers/cuda_8.0.61_375.26_linux-run") version('8.0.44', '6dca912f9b7e2b7569b0074a41713640', expand=False, url="https://developer.nvidia.com/compute/cuda/8.0/prod/local_installers/cuda_8.0.44_linux-run") version('7.5.18', '4b3bcecf0dfc35928a0898793cf3e4c6', expand=False, url="http://developer.download.nvidia.com/compute/cuda/7.5/Prod/local_installers/cuda_7.5.18_linux.run") version('6.5.14', '90b1b8f77313600cc294d9271741f4da', expand=False, url="http://developer.download.nvidia.com/compute/cuda/6_5/rel/installers/cuda_6.5.14_linux_64.run") def install(self, spec, prefix): runfile = glob(join_path(self.stage.path, 'cuda*_linux*'))[0] chmod = which('chmod') chmod('+x', runfile) runfile = which(runfile) # Note: NVIDIA does not officially support many newer versions of # compilers. For example, on CentOS 6, you must use GCC 4.4.7 or # older. See: # http://docs.nvidia.com/cuda/cuda-installation-guide-linux/#system-requirements # https://gist.github.com/ax3l/9489132 # for details. runfile( '--silent', # disable interactive prompts '--verbose', # create verbose log file '--override', # override compiler version checks '--toolkit', # install CUDA Toolkit '--toolkitpath=%s' % prefix )<|fim▁end|>
"""CUDA is a parallel computing platform and programming model invented by NVIDIA. It enables dramatic increases in computing performance by harnessing the power of the graphics processing unit (GPU).
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>/* Copyright 2015 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at <|fim▁hole|> http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package service contains code for syncing cloud load balancers // with the service registry. package service // import "k8s.io/kubernetes/pkg/controller/service"<|fim▁end|>
<|file_name|>SVGRenderTreeAsText.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2004, 2005, 2007, 2009 Apple Inc. All rights reserved. * (C) 2005 Rob Buis <[email protected]> * (C) 2006 Alexander Kellett <[email protected]> * Copyright (C) Research In Motion Limited 2010. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "config.h" #if ENABLE(SVG) #include "SVGRenderTreeAsText.h" #include "GraphicsTypes.h" #include "HTMLNames.h" #include "InlineTextBox.h" #include "LinearGradientAttributes.h" #include "NodeRenderStyle.h" #include "Path.h" #include "PatternAttributes.h" #include "RadialGradientAttributes.h" #include "RenderImage.h"<|fim▁hole|>#include "RenderSVGImage.h" #include "RenderSVGInlineText.h" #include "RenderSVGPath.h" #include "RenderSVGResourceClipper.h" #include "RenderSVGResourceFilter.h" #include "RenderSVGResourceGradient.h" #include "RenderSVGResourceLinearGradient.h" #include "RenderSVGResourceMarker.h" #include "RenderSVGResourceMasker.h" #include "RenderSVGResourcePattern.h" #include "RenderSVGResourceRadialGradient.h" #include "RenderSVGResourceSolidColor.h" #include "RenderSVGRoot.h" #include "RenderSVGShape.h" #include "RenderSVGText.h" #include "RenderTreeAsText.h" #include "SVGCircleElement.h" #include "SVGEllipseElement.h" #include "SVGInlineTextBox.h" #include "SVGLineElement.h" #include "SVGLinearGradientElement.h" #include "SVGNames.h" #include "SVGPathElement.h" #include "SVGPathUtilities.h" #include "SVGPatternElement.h" #include "SVGPointList.h" #include "SVGPolyElement.h" #include "SVGRadialGradientElement.h" #include "SVGRectElement.h" #include "SVGRootInlineBox.h" #include "SVGStopElement.h" #include "SVGStyledElement.h" #include <math.h> namespace WebCore { /** class + iomanip to help streaming list separators, i.e. ", " in string "a, b, c, d" * Can be used in cases where you don't know which item in the list is the first * one to be printed, but still want to avoid strings like ", b, c". 
*/ class TextStreamSeparator { public: TextStreamSeparator(const String& s) : m_separator(s) , m_needToSeparate(false) { } private: friend TextStream& operator<<(TextStream&, TextStreamSeparator&); String m_separator; bool m_needToSeparate; }; TextStream& operator<<(TextStream& ts, TextStreamSeparator& sep) { if (sep.m_needToSeparate) ts << sep.m_separator; else sep.m_needToSeparate = true; return ts; } template<typename ValueType> static void writeNameValuePair(TextStream& ts, const char* name, ValueType value) { ts << " [" << name << "=" << value << "]"; } template<typename ValueType> static void writeNameAndQuotedValue(TextStream& ts, const char* name, ValueType value) { ts << " [" << name << "=\"" << value << "\"]"; } static void writeIfNotEmpty(TextStream& ts, const char* name, const String& value) { if (!value.isEmpty()) writeNameValuePair(ts, name, value); } template<typename ValueType> static void writeIfNotDefault(TextStream& ts, const char* name, ValueType value, ValueType defaultValue) { if (value != defaultValue) writeNameValuePair(ts, name, value); } TextStream& operator<<(TextStream& ts, const FloatRect& r) { ts << "at (" << TextStream::FormatNumberRespectingIntegers(r.x()); ts << "," << TextStream::FormatNumberRespectingIntegers(r.y()); ts << ") size " << TextStream::FormatNumberRespectingIntegers(r.width()); ts << "x" << TextStream::FormatNumberRespectingIntegers(r.height()); return ts; } TextStream& operator<<(TextStream& ts, const AffineTransform& transform) { if (transform.isIdentity()) ts << "identity"; else ts << "{m=((" << transform.a() << "," << transform.b() << ")(" << transform.c() << "," << transform.d() << ")) t=(" << transform.e() << "," << transform.f() << ")}"; return ts; } static TextStream& operator<<(TextStream& ts, const WindRule rule) { switch (rule) { case RULE_NONZERO: ts << "NON-ZERO"; break; case RULE_EVENODD: ts << "EVEN-ODD"; break; } return ts; } static TextStream& operator<<(TextStream& ts, const SVGUnitTypes::SVGUnitType& unitType) { ts << SVGPropertyTraits<SVGUnitTypes::SVGUnitType>::toString(unitType); return ts; } static TextStream& operator<<(TextStream& ts, const SVGMarkerUnitsType& markerUnit) { ts << SVGPropertyTraits<SVGMarkerUnitsType>::toString(markerUnit); return ts; } TextStream& operator<<(TextStream& ts, const Color& c) { return ts << c.nameForRenderTreeAsText(); } // FIXME: Maybe this should be in KCanvasRenderingStyle.cpp static TextStream& operator<<(TextStream& ts, const DashArray& a) { ts << "{"; DashArray::const_iterator end = a.end(); for (DashArray::const_iterator it = a.begin(); it != end; ++it) { if (it != a.begin()) ts << ", "; ts << *it; } ts << "}"; return ts; } // FIXME: Maybe this should be in GraphicsTypes.cpp static TextStream& operator<<(TextStream& ts, LineCap style) { switch (style) { case ButtCap: ts << "BUTT"; break; case RoundCap: ts << "ROUND"; break; case SquareCap: ts << "SQUARE"; break; } return ts; } // FIXME: Maybe this should be in GraphicsTypes.cpp static TextStream& operator<<(TextStream& ts, LineJoin style) { switch (style) { case MiterJoin: ts << "MITER"; break; case RoundJoin: ts << "ROUND"; break; case BevelJoin: ts << "BEVEL"; break; } return ts; } static TextStream& operator<<(TextStream& ts, const SVGSpreadMethodType& type) { ts << SVGPropertyTraits<SVGSpreadMethodType>::toString(type).upper(); return ts; } static void writeSVGPaintingResource(TextStream& ts, RenderSVGResource* resource) { if (resource->resourceType() == SolidColorResourceType) { ts << "[type=SOLID] [color=" << 
static_cast<RenderSVGResourceSolidColor*>(resource)->color() << "]"; return; } // All other resources derive from RenderSVGResourceContainer RenderSVGResourceContainer* container = static_cast<RenderSVGResourceContainer*>(resource); Node* node = container->node(); ASSERT(node); ASSERT(node->isSVGElement()); if (resource->resourceType() == PatternResourceType) ts << "[type=PATTERN]"; else if (resource->resourceType() == LinearGradientResourceType) ts << "[type=LINEAR-GRADIENT]"; else if (resource->resourceType() == RadialGradientResourceType) ts << "[type=RADIAL-GRADIENT]"; ts << " [id=\"" << toSVGElement(node)->getIdAttribute() << "\"]"; } static void writeStyle(TextStream& ts, const RenderObject& object) { const RenderStyle* style = object.style(); const SVGRenderStyle* svgStyle = style->svgStyle(); if (!object.localTransform().isIdentity()) writeNameValuePair(ts, "transform", object.localTransform()); writeIfNotDefault(ts, "image rendering", style->imageRendering(), RenderStyle::initialImageRendering()); writeIfNotDefault(ts, "opacity", style->opacity(), RenderStyle::initialOpacity()); if (object.isSVGShape()) { const RenderSVGShape& shape = static_cast<const RenderSVGShape&>(object); ASSERT(shape.node()); ASSERT(shape.node()->isSVGElement()); Color fallbackColor; if (RenderSVGResource* strokePaintingResource = RenderSVGResource::strokePaintingResource(const_cast<RenderSVGShape*>(&shape), shape.style(), fallbackColor)) { TextStreamSeparator s(" "); ts << " [stroke={" << s; writeSVGPaintingResource(ts, strokePaintingResource); SVGLengthContext lengthContext(toSVGElement(shape.node())); double dashOffset = svgStyle->strokeDashOffset().value(lengthContext); double strokeWidth = svgStyle->strokeWidth().value(lengthContext); const Vector<SVGLength>& dashes = svgStyle->strokeDashArray(); DashArray dashArray; const Vector<SVGLength>::const_iterator end = dashes.end(); for (Vector<SVGLength>::const_iterator it = dashes.begin(); it != end; ++it) dashArray.append((*it).value(lengthContext)); writeIfNotDefault(ts, "opacity", svgStyle->strokeOpacity(), 1.0f); writeIfNotDefault(ts, "stroke width", strokeWidth, 1.0); writeIfNotDefault(ts, "miter limit", svgStyle->strokeMiterLimit(), 4.0f); writeIfNotDefault(ts, "line cap", svgStyle->capStyle(), ButtCap); writeIfNotDefault(ts, "line join", svgStyle->joinStyle(), MiterJoin); writeIfNotDefault(ts, "dash offset", dashOffset, 0.0); if (!dashArray.isEmpty()) writeNameValuePair(ts, "dash array", dashArray); ts << "}]"; } if (RenderSVGResource* fillPaintingResource = RenderSVGResource::fillPaintingResource(const_cast<RenderSVGShape*>(&shape), shape.style(), fallbackColor)) { TextStreamSeparator s(" "); ts << " [fill={" << s; writeSVGPaintingResource(ts, fillPaintingResource); writeIfNotDefault(ts, "opacity", svgStyle->fillOpacity(), 1.0f); writeIfNotDefault(ts, "fill rule", svgStyle->fillRule(), RULE_NONZERO); ts << "}]"; } writeIfNotDefault(ts, "clip rule", svgStyle->clipRule(), RULE_NONZERO); } writeIfNotEmpty(ts, "start marker", svgStyle->markerStartResource()); writeIfNotEmpty(ts, "middle marker", svgStyle->markerMidResource()); writeIfNotEmpty(ts, "end marker", svgStyle->markerEndResource()); } static TextStream& writePositionAndStyle(TextStream& ts, const RenderObject& object) { ts << " " << enclosingIntRect(const_cast<RenderObject&>(object).absoluteClippedOverflowRect()); writeStyle(ts, object); return ts; } static TextStream& operator<<(TextStream& ts, const RenderSVGShape& shape) { writePositionAndStyle(ts, shape); 
ASSERT(shape.node()->isSVGElement()); SVGElement* svgElement = toSVGElement(shape.node()); SVGLengthContext lengthContext(svgElement); if (svgElement->hasTagName(SVGNames::rectTag)) { SVGRectElement* element = static_cast<SVGRectElement*>(svgElement); writeNameValuePair(ts, "x", element->x().value(lengthContext)); writeNameValuePair(ts, "y", element->y().value(lengthContext)); writeNameValuePair(ts, "width", element->width().value(lengthContext)); writeNameValuePair(ts, "height", element->height().value(lengthContext)); } else if (svgElement->hasTagName(SVGNames::lineTag)) { SVGLineElement* element = static_cast<SVGLineElement*>(svgElement); writeNameValuePair(ts, "x1", element->x1().value(lengthContext)); writeNameValuePair(ts, "y1", element->y1().value(lengthContext)); writeNameValuePair(ts, "x2", element->x2().value(lengthContext)); writeNameValuePair(ts, "y2", element->y2().value(lengthContext)); } else if (svgElement->hasTagName(SVGNames::ellipseTag)) { SVGEllipseElement* element = static_cast<SVGEllipseElement*>(svgElement); writeNameValuePair(ts, "cx", element->cx().value(lengthContext)); writeNameValuePair(ts, "cy", element->cy().value(lengthContext)); writeNameValuePair(ts, "rx", element->rx().value(lengthContext)); writeNameValuePair(ts, "ry", element->ry().value(lengthContext)); } else if (svgElement->hasTagName(SVGNames::circleTag)) { SVGCircleElement* element = static_cast<SVGCircleElement*>(svgElement); writeNameValuePair(ts, "cx", element->cx().value(lengthContext)); writeNameValuePair(ts, "cy", element->cy().value(lengthContext)); writeNameValuePair(ts, "r", element->r().value(lengthContext)); } else if (svgElement->hasTagName(SVGNames::polygonTag) || svgElement->hasTagName(SVGNames::polylineTag)) { SVGPolyElement* element = static_cast<SVGPolyElement*>(svgElement); writeNameAndQuotedValue(ts, "points", element->pointList().valueAsString()); } else if (svgElement->hasTagName(SVGNames::pathTag)) { SVGPathElement* element = static_cast<SVGPathElement*>(svgElement); String pathString; // FIXME: We should switch to UnalteredParsing here - this will affect the path dumping output of dozens of tests. buildStringFromByteStream(element->pathByteStream(), pathString, NormalizedParsing); writeNameAndQuotedValue(ts, "data", pathString); } else ASSERT_NOT_REACHED(); return ts; } static TextStream& operator<<(TextStream& ts, const RenderSVGRoot& root) { return writePositionAndStyle(ts, root); } static void writeRenderSVGTextBox(TextStream& ts, const RenderSVGText& text) { SVGRootInlineBox* box = static_cast<SVGRootInlineBox*>(text.firstRootBox()); if (!box) return; ts << " " << enclosingIntRect(FloatRect(text.location(), FloatSize(box->logicalWidth(), box->logicalHeight()))); // FIXME: Remove this hack, once the new text layout engine is completly landed. We want to preserve the old layout test results for now. 
ts << " contains 1 chunk(s)"; if (text.parent() && (text.parent()->style()->visitedDependentColor(CSSPropertyColor) != text.style()->visitedDependentColor(CSSPropertyColor))) writeNameValuePair(ts, "color", text.style()->visitedDependentColor(CSSPropertyColor).nameForRenderTreeAsText()); } static inline void writeSVGInlineTextBox(TextStream& ts, SVGInlineTextBox* textBox, int indent) { Vector<SVGTextFragment>& fragments = textBox->textFragments(); if (fragments.isEmpty()) return; RenderSVGInlineText* textRenderer = toRenderSVGInlineText(textBox->textRenderer()); ASSERT(textRenderer); const SVGRenderStyle* svgStyle = textRenderer->style()->svgStyle(); String text = textBox->textRenderer()->text(); unsigned fragmentsSize = fragments.size(); for (unsigned i = 0; i < fragmentsSize; ++i) { SVGTextFragment& fragment = fragments.at(i); writeIndent(ts, indent + 1); unsigned startOffset = fragment.characterOffset; unsigned endOffset = fragment.characterOffset + fragment.length; // FIXME: Remove this hack, once the new text layout engine is completly landed. We want to preserve the old layout test results for now. ts << "chunk 1 "; ETextAnchor anchor = svgStyle->textAnchor(); bool isVerticalText = svgStyle->isVerticalWritingMode(); if (anchor == TA_MIDDLE) { ts << "(middle anchor"; if (isVerticalText) ts << ", vertical"; ts << ") "; } else if (anchor == TA_END) { ts << "(end anchor"; if (isVerticalText) ts << ", vertical"; ts << ") "; } else if (isVerticalText) ts << "(vertical) "; startOffset -= textBox->start(); endOffset -= textBox->start(); // </hack> ts << "text run " << i + 1 << " at (" << fragment.x << "," << fragment.y << ")"; ts << " startOffset " << startOffset << " endOffset " << endOffset; if (isVerticalText) ts << " height " << fragment.height; else ts << " width " << fragment.width; if (!textBox->isLeftToRightDirection() || textBox->dirOverride()) { ts << (textBox->isLeftToRightDirection() ? 
" LTR" : " RTL"); if (textBox->dirOverride()) ts << " override"; } ts << ": " << quoteAndEscapeNonPrintables(text.substring(fragment.characterOffset, fragment.length)) << "\n"; } } static inline void writeSVGInlineTextBoxes(TextStream& ts, const RenderText& text, int indent) { for (InlineTextBox* box = text.firstTextBox(); box; box = box->nextTextBox()) { if (!box->isSVGInlineTextBox()) continue; writeSVGInlineTextBox(ts, static_cast<SVGInlineTextBox*>(box), indent); } } static void writeStandardPrefix(TextStream& ts, const RenderObject& object, int indent) { writeIndent(ts, indent); ts << object.renderName(); if (object.node()) ts << " {" << object.node()->nodeName() << "}"; } static void writeChildren(TextStream& ts, const RenderObject& object, int indent) { for (RenderObject* child = object.firstChild(); child; child = child->nextSibling()) write(ts, *child, indent + 1); } static inline void writeCommonGradientProperties(TextStream& ts, SVGSpreadMethodType spreadMethod, const AffineTransform& gradientTransform, SVGUnitTypes::SVGUnitType gradientUnits) { writeNameValuePair(ts, "gradientUnits", gradientUnits); if (spreadMethod != SVGSpreadMethodPad) ts << " [spreadMethod=" << spreadMethod << "]"; if (!gradientTransform.isIdentity()) ts << " [gradientTransform=" << gradientTransform << "]"; } void writeSVGResourceContainer(TextStream& ts, const RenderObject& object, int indent) { writeStandardPrefix(ts, object, indent); Element* element = toElement(object.node()); const AtomicString& id = element->getIdAttribute(); writeNameAndQuotedValue(ts, "id", id); RenderSVGResourceContainer* resource = const_cast<RenderObject&>(object).toRenderSVGResourceContainer(); ASSERT(resource); if (resource->resourceType() == MaskerResourceType) { RenderSVGResourceMasker* masker = static_cast<RenderSVGResourceMasker*>(resource); writeNameValuePair(ts, "maskUnits", masker->maskUnits()); writeNameValuePair(ts, "maskContentUnits", masker->maskContentUnits()); ts << "\n"; #if ENABLE(FILTERS) } else if (resource->resourceType() == FilterResourceType) { RenderSVGResourceFilter* filter = static_cast<RenderSVGResourceFilter*>(resource); writeNameValuePair(ts, "filterUnits", filter->filterUnits()); writeNameValuePair(ts, "primitiveUnits", filter->primitiveUnits()); ts << "\n"; // Creating a placeholder filter which is passed to the builder. FloatRect dummyRect; RefPtr<SVGFilter> dummyFilter = SVGFilter::create(AffineTransform(), dummyRect, dummyRect, dummyRect, true); if (RefPtr<SVGFilterBuilder> builder = filter->buildPrimitives(dummyFilter.get())) { if (FilterEffect* lastEffect = builder->lastEffect()) lastEffect->externalRepresentation(ts, indent + 1); } #endif } else if (resource->resourceType() == ClipperResourceType) { RenderSVGResourceClipper* clipper = static_cast<RenderSVGResourceClipper*>(resource); writeNameValuePair(ts, "clipPathUnits", clipper->clipPathUnits()); ts << "\n"; } else if (resource->resourceType() == MarkerResourceType) { RenderSVGResourceMarker* marker = static_cast<RenderSVGResourceMarker*>(resource); writeNameValuePair(ts, "markerUnits", marker->markerUnits()); ts << " [ref at " << marker->referencePoint() << "]"; ts << " [angle="; if (marker->angle() == -1) ts << "auto" << "]\n"; else ts << marker->angle() << "]\n"; } else if (resource->resourceType() == PatternResourceType) { RenderSVGResourcePattern* pattern = static_cast<RenderSVGResourcePattern*>(resource); // Dump final results that are used for rendering. 
No use in asking SVGPatternElement for its patternUnits(), as it may // link to other patterns using xlink:href, we need to build the full inheritance chain, aka. collectPatternProperties() PatternAttributes attributes; static_cast<SVGPatternElement*>(pattern->node())->collectPatternAttributes(attributes); writeNameValuePair(ts, "patternUnits", attributes.patternUnits()); writeNameValuePair(ts, "patternContentUnits", attributes.patternContentUnits()); AffineTransform transform = attributes.patternTransform(); if (!transform.isIdentity()) ts << " [patternTransform=" << transform << "]"; ts << "\n"; } else if (resource->resourceType() == LinearGradientResourceType) { RenderSVGResourceLinearGradient* gradient = static_cast<RenderSVGResourceLinearGradient*>(resource); // Dump final results that are used for rendering. No use in asking SVGGradientElement for its gradientUnits(), as it may // link to other gradients using xlink:href, we need to build the full inheritance chain, aka. collectGradientProperties() SVGLinearGradientElement* linearGradientElement = static_cast<SVGLinearGradientElement*>(gradient->node()); LinearGradientAttributes attributes; linearGradientElement->collectGradientAttributes(attributes); writeCommonGradientProperties(ts, attributes.spreadMethod(), attributes.gradientTransform(), attributes.gradientUnits()); ts << " [start=" << gradient->startPoint(attributes) << "] [end=" << gradient->endPoint(attributes) << "]\n"; } else if (resource->resourceType() == RadialGradientResourceType) { RenderSVGResourceRadialGradient* gradient = static_cast<RenderSVGResourceRadialGradient*>(resource); // Dump final results that are used for rendering. No use in asking SVGGradientElement for its gradientUnits(), as it may // link to other gradients using xlink:href, we need to build the full inheritance chain, aka. collectGradientProperties() SVGRadialGradientElement* radialGradientElement = static_cast<SVGRadialGradientElement*>(gradient->node()); RadialGradientAttributes attributes; radialGradientElement->collectGradientAttributes(attributes); writeCommonGradientProperties(ts, attributes.spreadMethod(), attributes.gradientTransform(), attributes.gradientUnits()); FloatPoint focalPoint = gradient->focalPoint(attributes); FloatPoint centerPoint = gradient->centerPoint(attributes); float radius = gradient->radius(attributes); float focalRadius = gradient->focalRadius(attributes); ts << " [center=" << centerPoint << "] [focal=" << focalPoint << "] [radius=" << radius << "] [focalRadius=" << focalRadius << "]\n"; } else ts << "\n"; writeChildren(ts, object, indent); } void writeSVGContainer(TextStream& ts, const RenderObject& container, int indent) { // Currently RenderSVGResourceFilterPrimitive has no meaningful output. 
if (container.isSVGResourceFilterPrimitive()) return; writeStandardPrefix(ts, container, indent); writePositionAndStyle(ts, container); ts << "\n"; writeResources(ts, container, indent); writeChildren(ts, container, indent); } void write(TextStream& ts, const RenderSVGRoot& root, int indent) { writeStandardPrefix(ts, root, indent); ts << root << "\n"; writeChildren(ts, root, indent); } void writeSVGText(TextStream& ts, const RenderSVGText& text, int indent) { writeStandardPrefix(ts, text, indent); writeRenderSVGTextBox(ts, text); ts << "\n"; writeResources(ts, text, indent); writeChildren(ts, text, indent); } void writeSVGInlineText(TextStream& ts, const RenderSVGInlineText& text, int indent) { writeStandardPrefix(ts, text, indent); ts << " " << enclosingIntRect(FloatRect(text.firstRunOrigin(), text.floatLinesBoundingBox().size())) << "\n"; writeResources(ts, text, indent); writeSVGInlineTextBoxes(ts, text, indent); } void writeSVGImage(TextStream& ts, const RenderSVGImage& image, int indent) { writeStandardPrefix(ts, image, indent); writePositionAndStyle(ts, image); ts << "\n"; writeResources(ts, image, indent); } void write(TextStream& ts, const RenderSVGShape& shape, int indent) { writeStandardPrefix(ts, shape, indent); ts << shape << "\n"; writeResources(ts, shape, indent); } void writeSVGGradientStop(TextStream& ts, const RenderSVGGradientStop& stop, int indent) { writeStandardPrefix(ts, stop, indent); SVGStopElement* stopElement = static_cast<SVGStopElement*>(stop.node()); ASSERT(stopElement); RenderStyle* style = stop.style(); if (!style) return; ts << " [offset=" << stopElement->offset() << "] [color=" << stopElement->stopColorIncludingOpacity() << "]\n"; } void writeResources(TextStream& ts, const RenderObject& object, int indent) { const RenderStyle* style = object.style(); const SVGRenderStyle* svgStyle = style->svgStyle(); // FIXME: We want to use SVGResourcesCache to determine which resources are present, instead of quering the resource <-> id cache. // For now leave the DRT output as is, but later on we should change this so cycles are properly ignored in the DRT output. RenderObject& renderer = const_cast<RenderObject&>(object); if (!svgStyle->maskerResource().isEmpty()) { if (RenderSVGResourceMasker* masker = getRenderSVGResourceById<RenderSVGResourceMasker>(object.document(), svgStyle->maskerResource())) { writeIndent(ts, indent); ts << " "; writeNameAndQuotedValue(ts, "masker", svgStyle->maskerResource()); ts << " "; writeStandardPrefix(ts, *masker, 0); ts << " " << masker->resourceBoundingBox(&renderer) << "\n"; } } if (!svgStyle->clipperResource().isEmpty()) { if (RenderSVGResourceClipper* clipper = getRenderSVGResourceById<RenderSVGResourceClipper>(object.document(), svgStyle->clipperResource())) { writeIndent(ts, indent); ts << " "; writeNameAndQuotedValue(ts, "clipPath", svgStyle->clipperResource()); ts << " "; writeStandardPrefix(ts, *clipper, 0); ts << " " << clipper->resourceBoundingBox(&renderer) << "\n"; } } #if ENABLE(FILTERS) if (!svgStyle->filterResource().isEmpty()) { if (RenderSVGResourceFilter* filter = getRenderSVGResourceById<RenderSVGResourceFilter>(object.document(), svgStyle->filterResource())) { writeIndent(ts, indent); ts << " "; writeNameAndQuotedValue(ts, "filter", svgStyle->filterResource()); ts << " "; writeStandardPrefix(ts, *filter, 0); ts << " " << filter->resourceBoundingBox(&renderer) << "\n"; } } #endif } } // namespace WebCore #endif // ENABLE(SVG)<|fim▁end|>
#include "RenderSVGContainer.h" #include "RenderSVGGradientStop.h"
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Package sqlite3 provides the needed specifics to support the generic // versioning service.<|fim▁hole|><|fim▁end|>
package sqlite3
<|file_name|>formulas.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.conf import settings def mask_toggle(number_to_mask_or_unmask): return int(number_to_mask_or_unmask) ^ settings.MASKING_KEY<|fim▁end|>
<|file_name|>group.server.model.js<|end_file_name|><|fim▁begin|>'use strict'; /** * Module dependencies. */ var mongoose = require('mongoose'), Schema = mongoose.Schema; /** * Exam Schema */<|fim▁hole|> }, group_type: { //group type type: String, default: '', trim: true }, group_created:{//time the group record was stored (timestamp) type: Date, default: Date.now }, group_desc:{//description of the group type: String, default: '', trim: true } }); mongoose.model('Group', GroupSchema);<|fim▁end|>
var GroupSchema = new Schema({ group_name: {//group name type: String, default: ''
<|file_name|>download_hots_map.py<|end_file_name|><|fim▁begin|>import os; link = "http://media.blizzard.com/heroes/images/battlegrounds/maps/haunted-mines-v2/underground/6/" column = 0; rc_column = 0; while (rc_column == 0): row = 0;<|fim▁hole|> rc_column = os.system('wget ' + link + str(column) + '/' + str(row) + '.jpg -O ' + str(1000 + column) + '-' + str(1000 + row) + '.jpg') rc_row = rc_column while (rc_row == 0): row += 1 rc_row = os.system('wget ' + link + str(column) + '/' + str(row) + '.jpg -O ' + str(1000 + column) + '-' + str(1000 + row) + '.jpg') column += 1 p = os.popen('ls -1 *.jpg | tail -n2'); second_last_file = p.readline(); last_file = p.readline(); column_end = last_file[0:4] row_end = second_last_file[5:9] print column_end print row_end os.system('rm ' + column_end + '*'); os.system('rm *-' + row_end + '.jpg'); column_end = int(column_end) - 1000; row_end = int(row_end) - 1000; os.system('mkdir temp') i = 0; for r in range(0, row_end): for c in range(0, column_end): file_to_move = str(1000 + c) + '-' + str(1000 + row_end - r - 1) + '.jpg' os.system('cp ' + file_to_move + ' ./temp/' + str(100000 + i) + '.jpg'); i += 1 os.system('montage ./temp/*.jpg -tile ' + str(column_end) + 'x' + str(row_end) + ' -geometry +0+0 result.png'); os.system('montage ./temp/*.jpg -tile ' + str(column_end) + 'x' + str(row_end) + ' -geometry +0+0 result.jpg'); os.system('rm temp -r'); os.system('rm 1*.jpg');<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
mod cli;
<|file_name|>youtube.go<|end_file_name|><|fim▁begin|>package api import ( "net/http" "net/url" "github.com/nordsieck/defect" ) const ( PlaylistItems = "https://www.googleapis.com/youtube/v3/playlistItems" Key = "key" Part = "part" ContentDetails = "contentDetails" PlaylistID = "playlistId"<|fim▁hole|> PageToken = "pageToken" ErrDeserializeInput = defect.Error("Unable to deserialize input") MaxPlaylistItems = "50" // max value allowed: https://developers.google.com/youtube/v3/docs/playlistItems/list ) func GetPlaylistFragment(key, playlist, token string) (videoIDs []string, pageToken string, e error) { params := url.Values{ Key: {key}, Part: {ContentDetails}, PlaylistID: {playlist}, MaxResults: {MaxPlaylistItems}, } if token != "" { params[PageToken] = []string{token} } resp, err := http.Get(PlaylistItems + "?" + params.Encode()) if err != nil { return nil, "", err } defer resp.Body.Close() pl, err := DeserializePlaylist(resp.Body) if err != nil { return nil, "", err } return pl.VideoIDs(), pl.NextPageToken, nil } func PlaylistVideos(key, playlist string) ([]string, error) { fullList := []string{} token := "" for { var list []string var err error list, token, err = GetPlaylistFragment(key, playlist, token) if err != nil { return nil, err } fullList = append(fullList, list...) if token == "" { break } } return fullList, nil }<|fim▁end|>
MaxResults = "maxResults"
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by cloud.google.com/go/internal/gapicgen/gensnippets. DO NOT EDIT. // [START aiplatform_v1_generated_VizierService_SuggestTrials_sync] package main import ( "context" <|fim▁hole|>) func main() { ctx := context.Background() c, err := aiplatform.NewVizierClient(ctx) if err != nil { // TODO: Handle error. } defer c.Close() req := &aiplatformpb.SuggestTrialsRequest{ // TODO: Fill request struct fields. // See https://pkg.go.dev/google.golang.org/genproto/googleapis/cloud/aiplatform/v1#SuggestTrialsRequest. } op, err := c.SuggestTrials(ctx, req) if err != nil { // TODO: Handle error. } resp, err := op.Wait(ctx) if err != nil { // TODO: Handle error. } // TODO: Use resp. _ = resp } // [END aiplatform_v1_generated_VizierService_SuggestTrials_sync]<|fim▁end|>
aiplatform "cloud.google.com/go/aiplatform/apiv1" aiplatformpb "google.golang.org/genproto/googleapis/cloud/aiplatform/v1"
<|file_name|>Compressor.ts<|end_file_name|><|fim▁begin|>namespace Allors.Protocol { export class Compressor { static readonly indexMarker = '~'; static readonly itemSeparator = ','; private keyByValue: { [k: string]: string }; private counter: number; constructor() { this.keyByValue = {}; this.counter = 0; } public write(value: string): string { if (value === undefined || value === null) { return null; } if (this.keyByValue.hasOwnProperty(value)) { return this.keyByValue[value];<|fim▁hole|> return `${Compressor.indexMarker}${key}${Compressor.indexMarker}${value}`; } } }<|fim▁end|>
} const key = (++this.counter).toString(); this.keyByValue[value] = key;
<|file_name|>clock_sr_RS.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="sr_RS"> <context> <name>FirstDayCombo</name> <message> <location filename="../../../lxqtclockconfiguration.cpp" line="49"/> <source>&lt;locale based&gt;</source> <translation type="unfinished"></translation> </message> </context> <context> <name>LXQtClock</name> <message> <location filename="../../../lxqtclock.cpp" line="57"/> <source>Date&amp;Time (clock) plugin is deprecated</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclock.cpp" line="58"/> <source>The &lt;strong&gt;clock&lt;/strong&gt; plugin is deprecated and will be removed in future version of LXQt. Consider replacing it with &lt;strong&gt;worldclock&lt;/strong&gt;.&lt;br/&gt;</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclock.cpp" line="60"/> <source>don&apos;t show this again</source> <translation type="unfinished"></translation> </message> </context> <context> <name>LXQtClockConfiguration</name> <message> <source>LXQt Clock Settings</source> <translation type="vanished">Подешавање Рејзоровог сата</translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="14"/> <source>Clock Settings</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="20"/> <source>Time</source> <translation>Време</translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="26"/> <source>&amp;Show seconds</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="33"/> <source>12 &amp;hour style</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="40"/> <source>&amp;Use UTC</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="59"/> <source>Date &amp;format</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="76"/> <source>&amp;Do not show date</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="86"/> <source>Show date &amp;before time</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="93"/> <source>Show date &amp;after time</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="100"/> <source>Show date below time on new &amp;line</source><|fim▁hole|> <location filename="../../../lxqtclockconfiguration.ui" line="107"/> <source>First day of week in calendar</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="124"/> <source>Orientation</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="130"/> <source>Auto&amp;rotate when the panel is vertical</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.ui" line="50"/> 
<source>Date</source> <translation>Датум</translation> </message> <message> <location filename="../../../lxqtclockconfiguration.cpp" line="268"/> <source>Input custom date format</source> <translation type="unfinished"></translation> </message> <message> <location filename="../../../lxqtclockconfiguration.cpp" line="268"/> <source>Interpreted sequences of date format are: d the day as number without a leading zero (1 to 31) dd the day as number with a leading zero (01 to 31) ddd the abbreviated localized day name (e.g. &apos;Mon&apos; to &apos;Sun&apos;). dddd the long localized day name (e.g. &apos;Monday&apos; to &apos;Sunday&apos;). M the month as number without a leading zero (1-12) MM the month as number with a leading zero (01-12) MMM the abbreviated localized month name (e.g. &apos;Jan&apos; to &apos;Dec&apos;). MMMM the long localized month name (e.g. &apos;January&apos; to &apos;December&apos;). yy the year as two digit number (00-99) yyyy the year as four digit number All other input characters will be treated as text. Any sequence of characters that are enclosed in single quotes (&apos;) will also be treated as text and not be used as an expression. Custom date format:</source> <translation type="unfinished"></translation> </message> </context> </TS><|fim▁end|>
<translation type="unfinished"></translation> </message> <message>
<|file_name|>image_graph.py<|end_file_name|><|fim▁begin|>import tensorflow as tf from tensorflow.contrib import layers from tensorflow.contrib.framework import arg_scope from tensormate.graph import TfGgraphBuilder class ImageGraphBuilder(TfGgraphBuilder): def __init__(self, scope=None, device=None, plain=False, data_format="NHWC", data_format_ops=(layers.conv2d, layers.convolution2d, layers.convolution2d_transpose, layers.convolution2d_in_plane, layers.convolution2d_transpose, layers.conv2d_in_plane, layers.conv2d_transpose, layers.separable_conv2d, layers.separable_convolution2d,<|fim▁hole|> self.data_format = data_format self.data_format_ops = data_format_ops if data_format_ops is not None else [] def _call_body(self, *args, **kwargs): # is_training = kwargs.get("is_training", True) # reuse = self.ref_count > 0 with tf.variable_scope(self._scope, reuse=tf.AUTO_REUSE): with arg_scope(self.data_format_ops, data_format=self.data_format): if self._device is None: output = self._build(*args, **kwargs) else: with tf.device(self._device): output = self._build(*args, **kwargs) return output<|fim▁end|>
layers.avg_pool2d, layers.max_pool2d, layers.batch_norm)): super(ImageGraphBuilder, self).__init__(scope=scope, device=device, plain=plain)
<|file_name|>CollectionCtrl.js<|end_file_name|><|fim▁begin|>(function(){ function CollectionCtrl(Fixtures){ this.albums = Fixtures.getCollection(12);<|fim▁hole|> } angular .module('blocJams') .controller('CollectionCtrl',['Fixtures', CollectionCtrl]); })();<|fim▁end|>
<|file_name|>auth_handler.go<|end_file_name|><|fim▁begin|>package controller<|fim▁hole|> import ( "net/http" "encoding/json" "rest-commander/store" "rest-commander/model/dto" ) type AuthenticationController interface { HandleLogin(w http.ResponseWriter, r *http.Request) HandleLogout(w http.ResponseWriter, r *http.Request) } func (t *AuthenticationRoute) HandleLogin(w http.ResponseWriter, r *http.Request){ var auth dto.LoginRequest err := json.NewDecoder(r.Body).Decode(&auth) if err != nil { http.Error(w, err.Error(), 400) return } if !t.userStore.CheckPassword(auth.Username, auth.Password) { resp := dto.ErrorResponse{ Message: "Username or password are incorrect!", } w.WriteHeader(http.StatusBadRequest) json.NewEncoder(w).Encode(resp) return } token := store.NewAuthenticationToken(auth.Username) t.tokenStore.Add(token) t.userStore.Get(auth.Username).Password = auth.Password resp := dto.LoginResponse{ Token: token.Token, } json.NewEncoder(w).Encode(resp) } func (t *AuthenticationRoute) HandleLogout(w http.ResponseWriter, r *http.Request){ token := GetAuthtokenFromRequest(r) t.tokenStore.Remove(token.Token) }<|fim▁end|>
<|file_name|>IteratorGuide.java<|end_file_name|><|fim▁begin|>package com.baeldung.iteratorguide;<|fim▁hole|>import java.util.Iterator; import java.util.List; import java.util.ListIterator; public class IteratorGuide { public static void main(String[] args) { List<String> items = new ArrayList<>(); items.add("ONE"); items.add("TWO"); items.add("THREE"); Iterator<String> iter = items.iterator(); while (iter.hasNext()) { String next = iter.next(); System.out.println(next); iter.remove(); } ListIterator<String> listIterator = items.listIterator(); while(listIterator.hasNext()) { String nextWithIndex = items.get(listIterator.nextIndex()); String next = listIterator.next(); if( "ONE".equals(next)) { listIterator.set("SWAPPED"); } } listIterator.add("FOUR"); while(listIterator.hasPrevious()) { String previousWithIndex = items.get(listIterator.previousIndex()); String previous = listIterator.previous(); System.out.println(previous); } listIterator.forEachRemaining(e -> { System.out.println(e); }); } }<|fim▁end|>
import java.util.ArrayList;
<|file_name|>encodeBackslashEscapes.js<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> * text = escapeCharacters(text,"\\",true); * text = escapeCharacters(text,"`*_{}[]()>#+-.!",true); * * ...but we're sidestepping its use of the (slow) RegExp constructor * as an optimization for Firefox. This function gets called a LOT. */ showdown.subParser('encodeBackslashEscapes', function (text, options, globals) { 'use strict'; text = globals.converter._dispatch('encodeBackslashEscapes.before', text, options, globals); text = text.replace(/\\(\\)/g, showdown.helper.escapeCharactersCallback); text = text.replace(/\\([`*_{}\[\]()>#+.!~=-])/g, showdown.helper.escapeCharactersCallback); text = globals.converter._dispatch('encodeBackslashEscapes.after', text, options, globals); return text; });<|fim▁end|>
* Returns the string, after processing the following backslash escape sequences. * * attacklab: The polite way to do this is with the new escapeCharacters() function: *
<|file_name|>command.cc<|end_file_name|><|fim▁begin|>// Copyright 2011 Software Freedom Conservancy // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "command.h" #include "command_types.h" #include "logging.h" namespace webdriver { Command::Command() : command_type_(webdriver::CommandType::NoCommand) { } Command::~Command() { } void Command::Populate(const std::string& json_command) { LOG(TRACE) << "Entering Command::Populate"; // Clear the existing maps. this->command_parameters_.clear(); this->locator_parameters_.clear(); <|fim▁hole|> Json::Reader reader; bool successful_parse = reader.parse(json_command, root); if (!successful_parse) { // report to the user the failure and their locations in the document. LOG(WARN) << "Failed to parse configuration due " << reader.getFormatedErrorMessages() << std::endl << "JSON command: '" << json_command << "'"; } this->command_type_ = root.get("command", webdriver::CommandType::NoCommand).asString(); if (this->command_type_ != webdriver::CommandType::NoCommand) { Json::Value locator_parameter_object = root["locator"]; Json::Value::iterator it = locator_parameter_object.begin(); Json::Value::iterator end = locator_parameter_object.end(); for (; it != end; ++it) { std::string key = it.key().asString(); std::string value = locator_parameter_object[key].asString(); this->locator_parameters_[key] = value; } Json::Value command_parameter_object = root["parameters"]; it = command_parameter_object.begin(); end = command_parameter_object.end(); for (; it != end; ++it) { std::string key = it.key().asString(); Json::Value value = command_parameter_object[key]; this->command_parameters_[key] = value; } } else { LOG(DEBUG) << "Command type is zero, no 'command' attribute in JSON object"; } } } // namespace webdriver<|fim▁end|>
LOG(DEBUG) << "Raw JSON command: " << json_command; Json::Value root;
<|file_name|>kahansum.rs<|end_file_name|><|fim▁begin|>// Implements http://rosettacode.org/wiki/Kahan_summation #![feature(std_misc)] #![feature(collections)] use std::num::Float; use std::f32; fn find_max(lst: &[f32]) -> Option<f32> { if lst.is_empty() { return None } let max = lst.iter().fold(f32::NEG_INFINITY, |a, &b| Float::max(a, b)); Some(max) } fn with_bits(val: f32, digits: usize) -> f32 { let num = std::f32::to_str_digits(val, digits); num.parse::<f32>().unwrap() } fn kahan_sum(lst: &[f32]) -> Option<f32> { let mut sum = 0.0f32; let mut c = 0.0f32; for i in lst { let y = *i - c; let t = sum + y; c = (t - sum) - y; sum = t; } Some(with_bits(sum, 1)) } fn all_sums(vec: &[f32]) -> Vec<f32> { let mut res = Vec::new(); let mut perms = vec.permutations(); loop { let v = perms.next(); match v { Some(_v) => { let mut sum = 0.0f32; for e in &_v { sum += with_bits(*e, 1); } res.push(with_bits(sum, 1)); } None => break } } res } #[cfg(not(test))] fn main() { let v = [10000.0f32, 3.14159, 2.71828]; let sums = all_sums(&v); let res = kahan_sum(&v).unwrap(); let max = find_max(&sums[..]).unwrap(); println!("max: {} res: {}", max, res); } #[test] fn test_kahansum() { let v = [10000.0f32, 3.14159, 2.71828]; let sums = all_sums(&v); let res = kahan_sum(&v).unwrap(); let max = find_max(&sums[..]).unwrap(); assert!(max < res); } #[test] fn test_withbits() { let v = 3.123345f32;<|fim▁hole|>}<|fim▁end|>
let res = with_bits(v, 3); assert!(res == 3.123f32);
<|file_name|>nowvideo.py<|end_file_name|><|fim▁begin|>""" urlresolver Kodi plugin Copyright (C) 2011 t0mm0 Updated by Gujal (C) 2016 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import re from urlresolver import common from urlresolver.resolver import UrlResolver, ResolverError class NowvideoResolver(UrlResolver): name = "nowvideo"<|fim▁hole|> domains = ['nowvideo.eu', 'nowvideo.ch', 'nowvideo.sx', 'nowvideo.co', 'nowvideo.li', 'nowvideo.fo', 'nowvideo.at', 'nowvideo.ec'] pattern = '(?://|\.)(nowvideo\.(?:eu|ch|sx|co|li|fo|at|ec))/(?:video/|embed\.php\?\S*v=)([A-Za-z0-9]+)' def __init__(self): self.net = common.Net() def get_media_url(self, host, media_id): web_url = self.get_url(host, media_id) stream_url = '' html = self.net.http_GET(web_url).content try: r = re.search('flashvars.filekey=(.+?);', html) if r: r = r.group(1) try: filekey = re.compile('\s+%s="(.+?)"' % r).findall(html)[-1] except: filekey = r player_url = 'http://www.nowvideo.sx/api/player.api.php?key=%s&file=%s' % (filekey, media_id) html = self.net.http_GET(player_url).content r = re.search('url=(.+?)&', html) if r: stream_url = r.group(1) else: raise ResolverError('File Not Found or removed') except: print "no embedded urls found using first method" try: r = re.search('id="player".*?src="(.*?)"', html, re.DOTALL) if r: stream_url = r.group(1) except: print "no embedded urls found using second method" if stream_url: return '%s%s' % (stream_url, '|Referer=' + web_url) else: raise ResolverError('File Not Found or removed') def get_url(self, host, media_id): return 'http://embed.nowvideo.sx/embed/?v=%s' % media_id<|fim▁end|>
<|file_name|>bc-tabs.js<|end_file_name|><|fim▁begin|>/** * @module components * */ import $ from 'jquery'; import { assert } from '@ember/debug'; import { dasherize } from '@ember/string'; import EmberObject, { get, set } from '@ember/object'; import { isNone, isEmpty } from '@ember/utils'; import Component from '@ember/component'; import { inject as service } from '@ember/service'; import layout from '../templates/components/bc-tabs'; import parse from '../utils/parse'; /***/ const TAB_CONTENT = '.--bc-tabs-content'; function getQueryParams() { // get url search params let search = window.location.search.slice(1); // parse params to an object let params = {}; if (!isEmpty(search)) { params = parse(search); } // return params return params; } function getTabFromQueryParams(params=null) { // get params if none are passed in if (params === null) { params = getQueryParams(); } // get tab if the tab is not an emptry string or array and // the tab is not null or undefined // otherwise return '' (empty string) if (!isEmpty(get(params, 'bc_tab'))) { return dasherize(get(params, 'bc_tab')); } return null; } /** * `Component/BCTabs` * * @class BCTabs Component * @extends Ember.Component */ export default Component.extend({ layout: layout, classNames: ['--bc-tabs'], router: service(), useRouter: true, /** * variable for tracking tabNames, is an array * * @property model * @type object[] */ model: null, defaultTab: '', currentTab: '', hashName: '', firstRender: false, init() { this._super(); this.handleHash(); }, didRender() { this._super(); if (!get(this, 'firstRender')) { set(this, 'firstRender', true); let model = this.buildTabData(); if (!isEmpty(model)) { if (isEmpty(get(this, 'defaultTab'))) { let ftab = model.filterBy('isViewable', true).get('firstObject'); set(this, 'defaultTab', get(ftab, 'id')); } let activeTab; if (!isEmpty(get(this, 'hashName'))) { activeTab = model.findBy('id', get(this, 'hashName')); } if (isNone(activeTab) && !isEmpty(get(this, 'defaultTab'))) { activeTab = model.findBy('id', get(this, 'defaultTab')); if (isNone(activeTab)) { activeTab = model.filterBy('isViewable', true).get('firstObject'); set(this, 'defaultTab', get(activeTab, 'id')); } } this.openTab(activeTab); } } }, buildTabData() { assert('buildTabData must be called after render', this.$().length > 0); let model = []; this.$(TAB_CONTENT).children().each((index, el) => { let elData = $(el).data(); let data = EmberObject.create({ el, id: elData.id, active: false, tabName: elData.tabName, tabIndex: elData.tabIndex, isViewable: elData.isViewable, showBadge: elData.showBadge, badgeContent: elData.badgeContent, badgeColor: elData.badgeColor, showTab() { set(this, 'active', true); elData.showTab(); }, hideTab() { set(this, 'active', false); elData.hideTab(); }, on: elData.on, }); // register for child events data.on('change', () => { this.buildTabData(); }); model.push(data); }); if (get(model, 'length') > 0) { // sort models by tabIndex model = model.sortBy('tabIndex'); } <|fim▁hole|> handleHash() { const hash = window.location.hash; if (!isEmpty(hash) && hash.search(/^#tab-/) !== -1) { const name = dasherize(hash.replace(/^#tab-/, '').trim()); set(this, 'hashName', name); } else { set(this, 'hashName', getTabFromQueryParams()); } }, openTab(tab) { if (this.$().length > 0) { // hide all other tabs get(this, 'model').forEach(item => item.hideTab()); // show the new tab tab.showTab(); let tabname = get(tab, 'id'); const params = getQueryParams(); const curTab = getTabFromQueryParams(params); if (isEmpty(tabname) || 
tabname === get(this, 'defaultTab')) { tabname = null; } if (curTab !== tabname) { set(this, 'hashName', tabname); if (get(this, 'useRouter') && !isNone(get(this, 'router'))) { set(params, 'bc_tab', tabname); get(this, 'router').replaceWith(get(this, 'router.currentRouteName'), { queryParams: params }); } else { if (!isNone(tabname)) { window.history.replaceState('', document.title, `${window.location.pathname}#tab-${tabname}`); } else { window.history.replaceState('', document.title, window.location.pathname); } } } set(this, 'currentTab', tab); } }, triggerTabChange() { this.handleHash(); let id = get(this, 'hashName') || get(this, 'defaultTab'); let tab = get(this, 'model').findBy('id', id); if (tab && tab.id !== get(this, 'currentTab.id')) { this.openTab(tab); } }, didInsertElement() { this._super(...arguments); // setup router didTransition const router = get(this, 'router._router'); if (router && router.on) { router.on('didTransition', this, this.triggerTabChange); } }, willDestroyElement() { this._super(...arguments); const router = get(this, 'router._router'); if (router && router.off) { router.off('didTransition', this, this.triggerTabChange); } }, actions: { changeTab(tab) { this.openTab(tab); } } });<|fim▁end|>
set(this, 'model', model); return model; },
<|file_name|>upload.js<|end_file_name|><|fim▁begin|>(function () { "use strict"; require('futures/forEachAsync'); var fs = require('fs'), crypto = require('crypto'), path = require('path'), exec = require('child_process').exec, mime = require('mime'), FileStat = require('filestat'), dbaccess = require('../dbaccess'), utils = require('../utils'), hashAlgo = "md5"; function readFile(filePath, callback) { var readStream, hash = crypto.createHash(hashAlgo); readStream = fs.createReadStream(filePath); readStream.on('data', function (data) { hash.update(data); }); readStream.on('error', function (err) { console.log("Read Error: " + err.toString()); readStream.destroy(); fs.unlink(filePath); callback(err); }); readStream.on('end', function () { callback(null, hash.digest("hex")); }); } function saveToFs(md5, filePath, callback) { var newPath = utils.hashToPath(md5); path.exists(newPath, function (exists) { if (exists) { fs.move(filePath, newPath, function (err) { callback(err, newPath); }); return; } exec('mkdir -p ' + newPath, function (err, stdout, stderr) { var tError; if (err || stderr) { console.log("Err: " + (err ? err : "none")); console.log("stderr: " + (stderr ? stderr : "none")); tError = {error: err, stderr: stderr, stdout: stdout};<|fim▁hole|> return callback(tError, newPath); } console.log("stdout: " + (stdout ? stdout : "none")); fs.move(filePath, newPath, function (moveErr) { callback(moveErr, newPath); }); }); }); } function addKeysToFileStats(fieldNames, stats) { var fileStats = []; stats.forEach(function (item) { var fileStat = new FileStat(); item.forEach(function (fieldValue, i) { fileStat[fieldNames[i]] = fieldValue; }); if (fileStat.path) { fileStat.type = mime.lookup(fileStat.path); } fileStats.push(fileStat); }); return fileStats; } function importFile(fileStat, tmpFile, username, callback) { var oldPath; oldPath = tmpFile.path; readFile(oldPath, function (err, md5) { if (err) { fileStat.err = err; callback(err, fileStat); return; } // if we have an md5sum and they don't match, abandon ship if (fileStat.md5 && fileStat.md5 !== md5) { callback("MD5 sums don't match"); return; } fileStat.md5 = md5; fileStat.genTmd5(function (error, tmd5) { if (!error) { fileStat.tmd5 = tmd5; saveToFs(fileStat.md5, oldPath, function (fserr) { if (fserr) { // ignoring possible unlink error fs.unlink(oldPath); fileStat.err = "File did not save"; } else { dbaccess.put(fileStat, username); } callback(fserr, fileStat); }); } }); }); } function handleUpload(req, res, next) { if (!req.form) { return next(); } req.form.complete(function (err, fields, files) { var fileStats, bFirst; fields.statsHeader = JSON.parse(fields.statsHeader); fields.stats = JSON.parse(fields.stats); fileStats = addKeysToFileStats(fields.statsHeader, fields.stats); dbaccess.createViews(req.remoteUser, fileStats); res.writeHead(200, {'Content-Type': 'application/json'}); // response as array res.write("["); bFirst = true; function handleFileStat(next, fileStat) { // this callback is synchronous fileStat.checkMd5(function (qmd5Error, qmd5) { function finishReq(err) { console.log(fileStat); fileStat.err = err; // we only want to add a comma after the first one if (!bFirst) { res.write(","); } bFirst = false; res.write(JSON.stringify(fileStat)); return next(); } if (qmd5Error) { return finishReq(qmd5Error); } importFile(fileStat, files[qmd5], req.remoteUser, finishReq); }); } fileStats.forEachAsync(handleFileStat).then(function () { // end response array res.end("]"); }); }); } module.exports = handleUpload; }());<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>function solve(params) { var N = parseInt(params[0]), K = parseInt(params[1]), numbersAsString = params[2]; var numbers = numbersAsString.split(' ').map(Number); var result = []; for (var i = 0; i < N; i += 1) { if(i+K-1 === N) { break; } var min = 1000000000, max = -1000000000;<|fim▁hole|> max = numbers[j+i]; } if(numbers[i+j] < min) { min = numbers[j+i]; } } var sum = min + max; result.push(sum); } console.log(result.join(',')); //print answer } var test1 = ['4', '2', '1 3 1 8'], test2 = ['5', '3', '7 7 8 9 10']; console.log(solve(test1)); console.log(solve(test2));<|fim▁end|>
for (var j = 0; j < K; j += 1) { if(numbers[i+j] > max) {
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- """ Tests for django.core.servers. """ from __future__ import unicode_literals import os import socket from django.core.exceptions import ImproperlyConfigured from django.test import LiveServerTestCase from django.test.utils import override_settings from django.utils.http import urlencode from django.utils.six.moves.urllib.error import HTTPError from django.utils.six.moves.urllib.request import urlopen from django.utils._os import upath <|fim▁hole|> TEST_ROOT = os.path.dirname(upath(__file__)) TEST_SETTINGS = { 'MEDIA_URL': '/media/', 'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'), 'STATIC_URL': '/static/', 'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'), } class LiveServerBase(LiveServerTestCase): available_apps = [ 'servers', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', ] fixtures = ['testdata.json'] urls = 'servers.urls' @classmethod def setUpClass(cls): # Override settings cls.settings_override = override_settings(**TEST_SETTINGS) cls.settings_override.enable() super(LiveServerBase, cls).setUpClass() @classmethod def tearDownClass(cls): # Restore original settings cls.settings_override.disable() super(LiveServerBase, cls).tearDownClass() def urlopen(self, url): return urlopen(self.live_server_url + url) class LiveServerAddress(LiveServerBase): """ Ensure that the address set in the environment variable is valid. Refs #2879. """ @classmethod def setUpClass(cls): # Backup original environment variable address_predefined = 'DJANGO_LIVE_TEST_SERVER_ADDRESS' in os.environ old_address = os.environ.get('DJANGO_LIVE_TEST_SERVER_ADDRESS') # Just the host is not accepted cls.raises_exception('localhost', ImproperlyConfigured) # The host must be valid cls.raises_exception('blahblahblah:8081', socket.error) # The list of ports must be in a valid format cls.raises_exception('localhost:8081,', ImproperlyConfigured) cls.raises_exception('localhost:8081,blah', ImproperlyConfigured) cls.raises_exception('localhost:8081-', ImproperlyConfigured) cls.raises_exception('localhost:8081-blah', ImproperlyConfigured) cls.raises_exception('localhost:8081-8082-8083', ImproperlyConfigured) # If contrib.staticfiles isn't configured properly, the exception # should bubble up to the main thread. old_STATIC_URL = TEST_SETTINGS['STATIC_URL'] TEST_SETTINGS['STATIC_URL'] = None cls.raises_exception('localhost:8081', ImproperlyConfigured) TEST_SETTINGS['STATIC_URL'] = old_STATIC_URL # Restore original environment variable if address_predefined: os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = old_address else: del os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] @classmethod def tearDownClass(cls): # skip it, as setUpClass doesn't call its parent either pass @classmethod def raises_exception(cls, address, exception): os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = address try: super(LiveServerAddress, cls).setUpClass() raise Exception("The line above should have raised an exception") except exception: pass finally: super(LiveServerAddress, cls).tearDownClass() def test_test_test(self): # Intentionally empty method so that the test is picked up by the # test runner and the overridden setUpClass() method is executed. pass class LiveServerViews(LiveServerBase): def test_404(self): """ Ensure that the LiveServerTestCase serves 404s. Refs #2879.
""" try: self.urlopen('/') except HTTPError as err: self.assertEqual(err.code, 404, 'Expected 404 response') else: self.fail('Expected 404 response') def test_view(self): """ Ensure that the LiveServerTestCase serves views. Refs #2879. """ f = self.urlopen('/example_view/') self.assertEqual(f.read(), b'example view') def test_static_files(self): """ Ensure that the LiveServerTestCase serves static files. Refs #2879. """ f = self.urlopen('/static/example_static_file.txt') self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file') def test_media_files(self): """ Ensure that the LiveServerTestCase serves media files. Refs #2879. """ f = self.urlopen('/media/example_media_file.txt') self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file') def test_environ(self): f = self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'})) self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read()) class LiveServerDatabase(LiveServerBase): def test_fixtures_loaded(self): """ Ensure that fixtures are properly loaded and visible to the live server thread. Refs #2879. """ f = self.urlopen('/model_view/') self.assertEqual(f.read().splitlines(), [b'jane', b'robert']) def test_database_writes(self): """ Ensure that data written to the database by a view can be read. Refs #2879. """ self.urlopen('/create_model_instance/') self.assertQuerysetEqual( Person.objects.all().order_by('pk'), ['jane', 'robert', 'emily'], lambda b: b.name )<|fim▁end|>
from .models import Person
<|file_name|>no.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license */ CKEDITOR.plugins.setLang( 'stylescombo', 'no', { label: 'Stil', panelTitle: 'Stilformater', panelTitle1: 'Blokkstiler', <|fim▁hole|> panelTitle2: 'Inlinestiler', panelTitle3: 'Objektstiler' } );<|fim▁end|>
<|file_name|>RListItem.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Contact info: [email protected] */ package org.cobraparser.html.renderer; import java.awt.Color; import java.awt.FontMetrics; import java.awt.Graphics; import java.awt.Insets; import org.cobraparser.html.HtmlRendererContext; import org.cobraparser.html.domimpl.NodeImpl; import org.cobraparser.html.style.ListStyle; import org.cobraparser.html.style.RenderState; import org.cobraparser.ua.UserAgentContext; import org.w3c.dom.html.HTMLElement; class RListItem extends BaseRListElement { private static final int BULLET_WIDTH = 5; private static final int BULLET_HEIGHT = 5; private static final int BULLET_RMARGIN = 5; private static final int BULLET_SPACE_WIDTH = 36; public RListItem(final NodeImpl modelNode, final int listNesting, final UserAgentContext pcontext, final HtmlRendererContext rcontext, final FrameContext frameContext, final RenderableContainer parentContainer, final RCollection parent) { super(modelNode, listNesting, pcontext, rcontext, frameContext, parentContainer); // this.defaultMarginInsets = new java.awt.Insets(0, BULLET_SPACE_WIDTH, 0, // 0); } @Override public int getViewportListNesting(final int blockNesting) { return blockNesting + 1; } @Override public void invalidateLayoutLocal() { super.invalidateLayoutLocal(); this.value = null; } private static final Integer UNSET = new Integer(Integer.MIN_VALUE); private Integer value = null; private Integer getValue() { Integer value = this.value; if (value == null) { final HTMLElement node = (HTMLElement) this.modelNode; final String valueText = node == null ? null : node.getAttribute("value"); if (valueText == null) { value = UNSET; } else { try { value = Integer.valueOf(valueText); } catch (final NumberFormatException nfe) { value = UNSET; } } this.value = value; } return value; } private int count; @Override public void doLayout(final int availWidth, final int availHeight, final boolean expandWidth, final boolean expandHeight, final FloatingBoundsSource floatBoundsSource, final int defaultOverflowX, final int defaultOverflowY, final boolean sizeOnly) { super.doLayout(availWidth, availHeight, expandWidth, expandHeight, floatBoundsSource, defaultOverflowX, defaultOverflowY, sizeOnly); // Note: Count must be calculated even if layout is valid. 
final RenderState renderState = this.modelNode.getRenderState(); final Integer value = this.getValue(); if (value == UNSET) { this.count = renderState.incrementCount(DEFAULT_COUNTER_NAME, this.listNesting); } else { final int newCount = value.intValue(); this.count = newCount; renderState.resetCount(DEFAULT_COUNTER_NAME, this.listNesting, newCount + 1); } } @Override public void paintShifted(final Graphics g) { super.paintShifted(g); final RenderState rs = this.modelNode.getRenderState(); final Insets marginInsets = this.marginInsets; final RBlockViewport layout = this.bodyLayout; final ListStyle listStyle = this.listStyle; int bulletType = listStyle == null ? ListStyle.TYPE_UNSET : listStyle.type; if (bulletType != ListStyle.TYPE_NONE) { if (bulletType == ListStyle.TYPE_UNSET) { RCollection parent = this.getOriginalOrCurrentParent(); if (!(parent instanceof RList)) { parent = parent.getOriginalOrCurrentParent(); } if (parent instanceof RList) { final ListStyle parentListStyle = ((RList) parent).listStyle; bulletType = parentListStyle == null ? ListStyle.TYPE_DISC : parentListStyle.type; } else { bulletType = ListStyle.TYPE_DISC; } } // Paint bullets final Color prevColor = g.getColor(); g.setColor(rs.getColor()); try { final Insets insets = this.getInsets(this.hasHScrollBar, this.hasVScrollBar); final Insets paddingInsets = this.paddingInsets; final int baselineOffset = layout.getFirstBaselineOffset(); final int bulletRight = (marginInsets == null ? 0 : marginInsets.left) - BULLET_RMARGIN; final int bulletBottom = insets.top + baselineOffset + (paddingInsets == null ? 0 : paddingInsets.top); final int bulletTop = bulletBottom - BULLET_HEIGHT; final int bulletLeft = bulletRight - BULLET_WIDTH; final int bulletNumber = this.count; String numberText = null; switch (bulletType) { case ListStyle.TYPE_DECIMAL: numberText = bulletNumber + "."; break; case ListStyle.TYPE_LOWER_ALPHA: numberText = ((char) ('a' + bulletNumber)) + "."; break; case ListStyle.TYPE_UPPER_ALPHA: numberText = ((char) ('A' + bulletNumber)) + "."; break; case ListStyle.TYPE_DISC: g.fillOval(bulletLeft, bulletTop, BULLET_WIDTH, BULLET_HEIGHT); break; case ListStyle.TYPE_CIRCLE: g.drawOval(bulletLeft, bulletTop, BULLET_WIDTH, BULLET_HEIGHT); break; case ListStyle.TYPE_SQUARE: g.fillRect(bulletLeft, bulletTop, BULLET_WIDTH, BULLET_HEIGHT); break; } if (numberText != null) { final FontMetrics fm = g.getFontMetrics(); final int numberLeft = bulletRight - fm.stringWidth(numberText); final int numberY = bulletBottom; g.drawString(numberText, numberLeft, numberY); } } finally { g.setColor(prevColor); } } } }<|fim▁end|>
GNU LESSER GENERAL PUBLIC LICENSE Copyright (C) 2006 The Lobo Project
<|file_name|>dirac-dms-user-lfns.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ######################################################################## # $HeadURL$ ######################################################################## """ Get the list of all the user files. """ __RCSID__ = "$Id$" from DIRAC.Core.Base import Script days = 0 months = 0 years = 0 wildcard = None baseDir = '' emptyDirsFlag = False Script.registerSwitch( "D:", "Days=", "Match files older than number of days [%s]" % days ) Script.registerSwitch( "M:", "Months=", "Match files older than number of months [%s]" % months ) Script.registerSwitch( "Y:", "Years=", "Match files older than number of years [%s]" % years ) Script.registerSwitch( "w:", "Wildcard=", "Wildcard for matching filenames [All]" ) Script.registerSwitch( "b:", "BaseDir=", "Base directory to begin search (default /[vo]/user/[initial]/[username])" ) Script.registerSwitch( "e", "EmptyDirs", "Create a list of empty directories" ) Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1], 'Usage:', ' %s [option|cfgfile] ...' % Script.scriptName, ] ) ) <|fim▁hole|>for switch in Script.getUnprocessedSwitches(): if switch[0] == "D" or switch[0].lower() == "days": days = int( switch[1] ) if switch[0] == "M" or switch[0].lower() == "months": months = int( switch[1] ) if switch[0] == "Y" or switch[0].lower() == "years": years = int( switch[1] ) if switch[0].lower() == "w" or switch[0].lower() == "wildcard": wildcard = switch[1] if switch[0].lower() == "b" or switch[0].lower() == "basedir": baseDir = switch[1] if switch[0].lower() == "e" or switch[0].lower() == "emptydirs": emptyDirsFlag = True import DIRAC from DIRAC import gLogger from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getVOForGroup from DIRAC.Core.Security.ProxyInfo import getProxyInfo from DIRAC.Resources.Catalog.FileCatalog import FileCatalog from DIRAC.Core.Utilities.List import sortList from datetime import datetime, timedelta import sys, os, time, fnmatch fc = FileCatalog() def isOlderThan( cTimeStruct, days ): timeDelta = timedelta( days = days ) maxCTime = datetime.utcnow() - timeDelta if cTimeStruct < maxCTime: return True return False withMetadata = False if days or months or years: withMetadata = True totalDays = 0 if years: totalDays += 365 * years if months: totalDays += 30 * months if days: totalDays += days res = getProxyInfo( False, False ) if not res['OK']: gLogger.error( "Failed to get client proxy information.", res['Message'] ) DIRAC.exit( 2 ) proxyInfo = res['Value'] username = proxyInfo['username'] vo = '' if 'group' in proxyInfo: vo = getVOForGroup( proxyInfo['group'] ) if not baseDir: if not vo: gLogger.error( 'Could not determine VO' ) Script.showHelp() baseDir = '/%s/user/%s/%s' % ( vo, username[0], username ) baseDir = baseDir.rstrip( '/' ) gLogger.info( 'Will search for files in %s' % baseDir ) activeDirs = [baseDir] allFiles = [] emptyDirs = [] while len( activeDirs ) > 0: currentDir = activeDirs.pop() res = fc.listDirectory( currentDir, withMetadata, timeout = 360 ) if not res['OK']: gLogger.error( "Error retrieving directory contents", "%s %s" % ( currentDir, res['Message'] ) ) elif currentDir in res['Value']['Failed']: gLogger.error( "Error retrieving directory contents", "%s %s" % ( currentDir, res['Value']['Failed'][currentDir] ) ) else: dirContents = res['Value']['Successful'][currentDir] subdirs = dirContents['SubDirs'] files = dirContents['Files'] if not subdirs and not files: emptyDirs.append( currentDir ) gLogger.notice( '%s: empty 
directory' % currentDir ) else: for subdir in sorted( subdirs, reverse = True ): if ( not withMetadata ) or isOlderThan( subdirs[subdir]['CreationDate'], totalDays ): activeDirs.append( subdir ) for filename in sorted( files ): fileOK = False if ( not withMetadata ) or isOlderThan( files[filename]['MetaData']['CreationDate'], totalDays ): if wildcard is None or fnmatch.fnmatch( filename, wildcard ): fileOK = True if not fileOK: files.pop( filename ) allFiles += sorted( files ) gLogger.notice( "%s: %d files%s, %d sub-directories" % ( currentDir, len( files ), ' matching' if withMetadata or wildcard else '', len( subdirs ) ) ) outputFileName = '%s.lfns' % baseDir.replace( '/%s' % vo, '%s' % vo ).replace( '/', '-' ) outputFile = open( outputFileName, 'w' ) for lfn in sortList( allFiles ): outputFile.write( lfn + '\n' ) outputFile.close() gLogger.notice( '%d matched files have been put in %s' % ( len( allFiles ), outputFileName ) ) if emptyDirsFlag: outputFileName = '%s.emptydirs' % baseDir.replace( '/%s' % vo, '%s' % vo ).replace( '/', '-' ) outputFile = open( outputFileName, 'w' ) for dir in sortList( emptyDirs ): outputFile.write( dir + '\n' ) outputFile.close() gLogger.notice( '%d empty directories have been put in %s' % ( len( emptyDirs ), outputFileName ) ) DIRAC.exit( 0 )<|fim▁end|>
Script.parseCommandLine( ignoreErrors = False )
<|file_name|>timerservice_impl.py<|end_file_name|><|fim▁begin|>################################################################################ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ import collections import time from enum import Enum from pyflink.datastream import TimerService from pyflink.datastream.timerservice import InternalTimer, K, N, InternalTimerService from pyflink.fn_execution.state_impl import RemoteKeyedStateBackend class InternalTimerImpl(InternalTimer[K, N]): def __init__(self, timestamp: int, key: K, namespace: N): self._timestamp = timestamp self._key = key<|fim▁hole|> def get_timestamp(self) -> int: return self._timestamp def get_key(self) -> K: return self._key def get_namespace(self) -> N: return self._namespace def __hash__(self): result = int(self._timestamp ^ (self._timestamp >> 32)) result = 31 * result + hash(tuple(self._key)) result = 31 * result + hash(self._namespace) return result def __eq__(self, other): return self.__class__ == other.__class__ and self._timestamp == other._timestamp \ and self._key == other._key and self._namespace == other._namespace class TimerOperandType(Enum): REGISTER_EVENT_TIMER = 0 REGISTER_PROC_TIMER = 1 DELETE_EVENT_TIMER = 2 DELETE_PROC_TIMER = 3 class InternalTimerServiceImpl(InternalTimerService[N]): """ Internal implementation of InternalTimerService. 
""" def __init__(self, keyed_state_backend: RemoteKeyedStateBackend): self._keyed_state_backend = keyed_state_backend self._current_watermark = None self.timers = collections.OrderedDict() def current_processing_time(self): return int(time.time() * 1000) def current_watermark(self): return self._current_watermark def advance_watermark(self, watermark: int): self._current_watermark = watermark def register_processing_time_timer(self, namespace: N, t: int): current_key = self._keyed_state_backend.get_current_key() timer = (TimerOperandType.REGISTER_PROC_TIMER, InternalTimerImpl(t, current_key, namespace)) self.timers[timer] = None def register_event_time_timer(self, namespace: N, t: int): current_key = self._keyed_state_backend.get_current_key() timer = (TimerOperandType.REGISTER_EVENT_TIMER, InternalTimerImpl(t, current_key, namespace)) self.timers[timer] = None def delete_processing_time_timer(self, namespace: N, t: int): current_key = self._keyed_state_backend.get_current_key() timer = (TimerOperandType.DELETE_PROC_TIMER, InternalTimerImpl(t, current_key, namespace)) self.timers[timer] = None def delete_event_time_timer(self, namespace: N, t: int): current_key = self._keyed_state_backend.get_current_key() timer = (TimerOperandType.DELETE_EVENT_TIMER, InternalTimerImpl(t, current_key, namespace)) self.timers[timer] = None class TimerServiceImpl(TimerService): """ Internal implementation of TimerService. """ def __init__(self, internal_timer_service: InternalTimerServiceImpl): self._internal = internal_timer_service self.timers = self._internal.timers def current_processing_time(self) -> int: return self._internal.current_processing_time() def current_watermark(self) -> int: return self._internal.current_watermark() def advance_watermark(self, wm): self._internal.advance_watermark(wm) def register_processing_time_timer(self, t: int): self._internal.register_processing_time_timer(None, t) def register_event_time_timer(self, t: int): self._internal.register_event_time_timer(None, t) def delete_processing_time_timer(self, t: int): self._internal.delete_processing_time_timer(None, t) def delete_event_time_timer(self, t: int): self._internal.delete_event_time_timer(None, t)<|fim▁end|>
self._namespace = namespace
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from importlib import import_module from django.apps import AppConfig as BaseAppConfig <|fim▁hole|> name = "gestioneide" def ready(self): import_module("gestioneide.receivers")<|fim▁end|>
class AppConfig(BaseAppConfig):
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>"""Tests for the Plaato integration."""<|fim▁end|>
<|file_name|>south_settings.py<|end_file_name|><|fim▁begin|>""" These settings are used by the ``manage.py`` command. With normal tests we want to use the fastest possible way which is an in-memory sqlite database but if you want to create South migrations you need a persistant database. Unfortunately there seems to be an issue with either South or syncdb so that defining two routers ("default" and "south") does not work. """ from cmsplugin_redirect.tests.test_settings import * # NOQA DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'db.sqlite',<|fim▁hole|><|fim▁end|>
} } INSTALLED_APPS.append('south', )
<|file_name|>buttons.py<|end_file_name|><|fim▁begin|>import numpy import argparse import time import mido import colorsys import gitgrid.gridcontroller import gitgrid.utils.utils args = gitgrid.utils.utils.controller_args() tmp = gitgrid.gridcontroller.create(args.controller, args.input, args.output) def toggle(x, y, Message): curr = tmp.lights[x, y, :] / 255. hsv = list(colorsys.rgb_to_hsv(*curr)) hsv[0] += 0.1 tmp.lights[x, y, :] = numpy.array(colorsys.hsv_to_rgb(*hsv)) * 255. def foo(action, message): print action tmp.lights[:, :] = numpy.array([1, 0, 0]) tmp.buttons[:, :] = toggle tmp.actions = { 'up': foo,<|fim▁hole|> 'down': foo, 'left': foo, 'right': foo, 'tab1': foo, 'tab2': foo, 'tab3': foo, 'tab4': foo, 'ok': foo, 'cancel': foo, } tmp.loop()<|fim▁end|>
<|file_name|>installp.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2017, Kairo Araujo <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: installp author: - Kairo Araujo (@kairoaraujo) short_description: Manage packages on AIX description: - Manage packages using 'installp' on AIX version_added: '2.8' options: accept_license: description: - Whether to accept the license for the package(s). type: bool default: no name: description: - One or more packages to install or remove. - Use C(all) to install all packages available on the informed C(repository_path). type: list required: true aliases: [ pkg ] repository_path: description: - Path with AIX packages (required to install). type: path state: description: - Whether the package needs to be present on or absent from the system. type: str choices: [ absent, present ] default: present notes: - If the package is already installed, even if the package/fileset is new, the module will not install it. ''' EXAMPLES = r''' - name: Install package foo installp: name: foo<|fim▁hole|>- name: Install bos.sysmgt that includes bos.sysmgt.nim.master, bos.sysmgt.nim.spot installp: name: bos.sysmgt repository_path: /repository/AIX71/installp/base package_license: yes state: present - name: Install bos.sysmgt.nim.master only installp: name: bos.sysmgt.nim.master repository_path: /repository/AIX71/installp/base package_license: yes state: present - name: Install bos.sysmgt.nim.master and bos.sysmgt.nim.spot installp: name: bos.sysmgt.nim.master, bos.sysmgt.nim.spot repository_path: /repository/AIX71/installp/base package_license: yes state: present - name: Remove packages bos.sysmgt.nim.master installp: name: bos.sysmgt.nim.master state: absent ''' RETURN = r''' # ''' import os import re from ansible.module_utils.basic import AnsibleModule def _check_new_pkg(module, package, repository_path): """ Check if the package or fileset has the correct name and repository path. :param module: Ansible module arguments spec. :param package: Package/fileset name. :param repository_path: Repository package path. :return: Bool, package information. """ if os.path.isdir(repository_path): installp_cmd = module.get_bin_path('installp', True) rc, package_result, err = module.run_command("%s -l -MR -d %s" % (installp_cmd, repository_path)) if rc != 0: module.fail_json(msg="Failed to run installp.", rc=rc, err=err) if package == 'all': pkg_info = "All packages on dir" return True, pkg_info else: pkg_info = {} for line in package_result.splitlines(): if re.findall(package, line): pkg_name = line.split()[0].strip() pkg_version = line.split()[1].strip() pkg_info[pkg_name] = pkg_version return True, pkg_info return False, None else: module.fail_json(msg="Repository path %s is not valid." % repository_path) def _check_installed_pkg(module, package, repository_path): """ Check the package on AIX. It verifies if the package is installed and gathers its information. :param module: Ansible module parameters spec. :param package: Package/fileset name. :param repository_path: Repository package path. :return: Bool, package data.
""" lslpp_cmd = module.get_bin_path('lslpp', True) rc, lslpp_result, err = module.run_command("%s -lcq %s*" % (lslpp_cmd, package)) if rc == 1: package_state = ' '.join(err.split()[-2:]) if package_state == 'not installed.': return False, None else: module.fail_json(msg="Failed to run lslpp.", rc=rc, err=err) if rc != 0: module.fail_json(msg="Failed to run lslpp.", rc=rc, err=err) pkg_data = {} full_pkg_data = lslpp_result.splitlines() for line in full_pkg_data: pkg_name, fileset, level = line.split(':')[0:3] pkg_data[pkg_name] = fileset, level return True, pkg_data def remove(module, installp_cmd, packages): repository_path = None remove_count = 0 removed_pkgs = [] not_found_pkg = [] for package in packages: pkg_check, dummy = _check_installed_pkg(module, package, repository_path) if pkg_check: if not module.check_mode: rc, remove_out, err = module.run_command("%s -u %s" % (installp_cmd, package)) if rc != 0: module.fail_json(msg="Failed to run installp.", rc=rc, err=err) remove_count += 1 removed_pkgs.append(package) else: not_found_pkg.append(package) if remove_count > 0: if len(not_found_pkg) > 1: not_found_pkg.insert(0, "Package(s) not found: ") changed = True msg = "Packages removed: %s. %s " % (' '.join(removed_pkgs), ' '.join(not_found_pkg)) else: changed = False msg = ("No packages removed, all packages not found: %s" % ' '.join(not_found_pkg)) return changed, msg def install(module, installp_cmd, packages, repository_path, accept_license): installed_pkgs = [] not_found_pkgs = [] already_installed_pkgs = {} accept_license_param = { True: '-Y', False: '', } # Validate if package exists on repository path. for package in packages: pkg_check, pkg_data = _check_new_pkg(module, package, repository_path) # If package exists on repository path, check if package is installed. if pkg_check: pkg_check_current, pkg_info = _check_installed_pkg(module, package, repository_path) # If package is already installed. if pkg_check_current: # Check if package is a package and not a fileset, get version # and add the package into already installed list if package in pkg_info.keys(): already_installed_pkgs[package] = pkg_info[package][1] else: # If the package is not a package but a fileset, confirm # and add the fileset/package into already installed list for key in pkg_info.keys(): if package in pkg_info[key]: already_installed_pkgs[package] = pkg_info[key][1] else: if not module.check_mode: rc, out, err = module.run_command("%s -a %s -X -d %s %s" % (installp_cmd, accept_license_param[accept_license], repository_path, package)) if rc != 0: module.fail_json(msg="Failed to run installp", rc=rc, err=err) installed_pkgs.append(package) else: not_found_pkgs.append(package) if len(installed_pkgs) > 0: installed_msg = (" Installed: %s." % ' '.join(installed_pkgs)) else: installed_msg = '' if len(not_found_pkgs) > 0: not_found_msg = (" Not found: %s." % ' '.join(not_found_pkgs)) else: not_found_msg = '' if len(already_installed_pkgs) > 0: already_installed_msg = (" Already installed: %s." 
% already_installed_pkgs) else: already_installed_msg = '' if len(installed_pkgs) > 0: changed = True msg = ("%s%s%s" % (installed_msg, not_found_msg, already_installed_msg)) else: changed = False msg = ("No packages installed.%s%s%s" % (installed_msg, not_found_msg, already_installed_msg)) return changed, msg def main(): module = AnsibleModule( argument_spec=dict( name=dict(type='list', required=True, aliases=['pkg']), repository_path=dict(type='path'), accept_license=dict(type='bool', default=False), state=dict(type='str', default='present', choices=['absent', 'present']), ), supports_check_mode=True, ) name = module.params['name'] repository_path = module.params['repository_path'] accept_license = module.params['accept_license'] state = module.params['state'] installp_cmd = module.get_bin_path('installp', True) if state == 'present': if repository_path is None: module.fail_json(msg="repository_path is required to install package") changed, msg = install(module, installp_cmd, name, repository_path, accept_license) elif state == 'absent': changed, msg = remove(module, installp_cmd, name) else: module.fail_json(changed=False, msg="Unexpected state.") module.exit_json(changed=changed, msg=msg) if __name__ == '__main__': main()<|fim▁end|>
    repository_path: /repository/AIX71/installp/base
    package_license: yes
    state: present
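A minimal standalone sketch of the lslpp parsing that _check_installed_pkg performs in the record above: `lslpp -lcq` emits colon-separated records, and the module keeps the first three fields per line. The sample line below is invented for illustration; real AIX output carries more fields.

def parse_lslpp(lslpp_result):
    # map the first field to the (fileset, level) pair, as the module does
    pkg_data = {}
    for line in lslpp_result.splitlines():
        pkg_name, fileset, level = line.split(':')[0:3]
        pkg_data[pkg_name] = (fileset, level)
    return pkg_data

sample = "bos.sysmgt:bos.sysmgt.nim.master:7.1.4.0"
print(parse_lslpp(sample))  # {'bos.sysmgt': ('bos.sysmgt.nim.master', '7.1.4.0')}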
<|file_name|>size.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ //! Generic type for CSS properties that are composed by two dimensions. use crate::parser::ParserContext; use crate::Zero; use cssparser::Parser; use std::fmt::{self, Write}; use style_traits::{CssWriter, ParseError, ToCss}; /// A generic size, for `border-*-radius` longhand properties, or /// `border-spacing`. #[derive( Animate, Clone, ComputeSquaredDistance, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToAnimatedZero, ToAnimatedValue, ToComputedValue, ToResolvedValue, ToShmem, )] #[allow(missing_docs)] #[repr(C)] pub struct Size2D<L> { pub width: L, pub height: L, } impl<L> Size2D<L> { #[inline] /// Create a new `Size2D` for an area of given width and height.<|fim▁hole|> } /// Returns the width component. pub fn width(&self) -> &L { &self.width } /// Returns the height component. pub fn height(&self) -> &L { &self.height } /// Parse a `Size2D` with a given parsing function. pub fn parse_with<'i, 't, F>( context: &ParserContext, input: &mut Parser<'i, 't>, parse_one: F, ) -> Result<Self, ParseError<'i>> where L: Clone, F: Fn(&ParserContext, &mut Parser<'i, 't>) -> Result<L, ParseError<'i>>, { let first = parse_one(context, input)?; let second = input .try_parse(|i| parse_one(context, i)) .unwrap_or_else(|_| first.clone()); Ok(Self::new(first, second)) } } impl<L> ToCss for Size2D<L> where L: ToCss + PartialEq, { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write, { self.width.to_css(dest)?; if self.height != self.width { dest.write_str(" ")?; self.height.to_css(dest)?; } Ok(()) } } impl<L: Zero> Zero for Size2D<L> { fn zero() -> Self { Self::new(L::zero(), L::zero()) } fn is_zero(&self) -> bool { self.width.is_zero() && self.height.is_zero() } }<|fim▁end|>
    pub fn new(width: L, height: L) -> Self {
        Self { width, height }
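A language-neutral sketch of the Size2D::parse_with logic in the record above: parse one value, then try a second, and fall back to cloning the first when the second is absent (CSS border-spacing style shorthand). Serialization does the inverse, emitting the height only when it differs from the width. Tokens are reduced to plain strings here.

def parse_size2d(tokens):
    width = tokens[0]
    height = tokens[1] if len(tokens) > 1 else width  # missing second value: reuse the first
    return width, height

def to_css(width, height):
    return width if width == height else "%s %s" % (width, height)

assert parse_size2d(["4px"]) == ("4px", "4px")
assert parse_size2d(["4px", "2px"]) == ("4px", "2px")
assert to_css("4px", "4px") == "4px"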
<|file_name|>supported-languages.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core'; @Component({ selector: 'app-supported-languages', templateUrl: './supported-languages.component.html',<|fim▁hole|>}) export class SupportedLanguagesComponent implements OnInit { constructor() { } ngOnInit() { } }<|fim▁end|>
styleUrls: ['./supported-languages.component.sass']
<|file_name|>level_bar.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (5c017c9) from gir-files (71d73f0) // DO NOT EDIT #[cfg(feature = "v3_6")] use LevelBarMode; use Orientable; use Widget; use ffi; use glib; use glib::object::Downcast; use glib::object::IsA; #[cfg(feature = "v3_6")] use glib::signal::connect; use glib::translate::*; #[cfg(feature = "v3_6")] use glib_ffi; #[cfg(feature = "v3_6")] use libc; #[cfg(feature = "v3_6")] use std::boxed::Box as Box_; #[cfg(feature = "v3_6")] use std::mem; #[cfg(feature = "v3_6")] use std::mem::transmute; glib_wrapper! { pub struct LevelBar(Object<ffi::GtkLevelBar>): Widget, Orientable; match fn { get_type => || ffi::gtk_level_bar_get_type(), } } impl LevelBar { #[cfg(feature = "v3_6")] pub fn new() -> LevelBar { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_level_bar_new()).downcast_unchecked() } } #[cfg(feature = "v3_6")] pub fn new_for_interval(min_value: f64, max_value: f64) -> LevelBar { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_level_bar_new_for_interval(min_value, max_value)).downcast_unchecked() } } } pub trait LevelBarExt { #[cfg(feature = "v3_6")] fn add_offset_value(&self, name: &str, value: f64); #[cfg(feature = "v3_8")] fn get_inverted(&self) -> bool; #[cfg(feature = "v3_6")] fn get_max_value(&self) -> f64; #[cfg(feature = "v3_6")] fn get_min_value(&self) -> f64; #[cfg(feature = "v3_6")] fn get_mode(&self) -> LevelBarMode;<|fim▁hole|> #[cfg(feature = "v3_6")] fn get_value(&self) -> f64; #[cfg(feature = "v3_6")] fn remove_offset_value<'a, P: Into<Option<&'a str>>>(&self, name: P); #[cfg(feature = "v3_8")] fn set_inverted(&self, inverted: bool); #[cfg(feature = "v3_6")] fn set_max_value(&self, value: f64); #[cfg(feature = "v3_6")] fn set_min_value(&self, value: f64); #[cfg(feature = "v3_6")] fn set_mode(&self, mode: LevelBarMode); #[cfg(feature = "v3_6")] fn set_value(&self, value: f64); #[cfg(feature = "v3_6")] fn connect_offset_changed<F: Fn(&Self, &str) + 'static>(&self, f: F) -> u64; } impl<O: IsA<LevelBar> + IsA<glib::object::Object>> LevelBarExt for O { #[cfg(feature = "v3_6")] fn add_offset_value(&self, name: &str, value: f64) { unsafe { ffi::gtk_level_bar_add_offset_value(self.to_glib_none().0, name.to_glib_none().0, value); } } #[cfg(feature = "v3_8")] fn get_inverted(&self) -> bool { unsafe { from_glib(ffi::gtk_level_bar_get_inverted(self.to_glib_none().0)) } } #[cfg(feature = "v3_6")] fn get_max_value(&self) -> f64 { unsafe { ffi::gtk_level_bar_get_max_value(self.to_glib_none().0) } } #[cfg(feature = "v3_6")] fn get_min_value(&self) -> f64 { unsafe { ffi::gtk_level_bar_get_min_value(self.to_glib_none().0) } } #[cfg(feature = "v3_6")] fn get_mode(&self) -> LevelBarMode { unsafe { from_glib(ffi::gtk_level_bar_get_mode(self.to_glib_none().0)) } } #[cfg(feature = "v3_6")] fn get_offset_value<'a, P: Into<Option<&'a str>>>(&self, name: P) -> Option<f64> { let name = name.into(); let name = name.to_glib_none(); unsafe { let mut value = mem::uninitialized(); let ret = from_glib(ffi::gtk_level_bar_get_offset_value(self.to_glib_none().0, name.0, &mut value)); if ret { Some(value) } else { None } } } #[cfg(feature = "v3_6")] fn get_value(&self) -> f64 { unsafe { ffi::gtk_level_bar_get_value(self.to_glib_none().0) } } #[cfg(feature = "v3_6")] fn remove_offset_value<'a, P: Into<Option<&'a str>>>(&self, name: P) { let name = name.into(); let name = name.to_glib_none(); unsafe { ffi::gtk_level_bar_remove_offset_value(self.to_glib_none().0, name.0); } } 
#[cfg(feature = "v3_8")] fn set_inverted(&self, inverted: bool) { unsafe { ffi::gtk_level_bar_set_inverted(self.to_glib_none().0, inverted.to_glib()); } } #[cfg(feature = "v3_6")] fn set_max_value(&self, value: f64) { unsafe { ffi::gtk_level_bar_set_max_value(self.to_glib_none().0, value); } } #[cfg(feature = "v3_6")] fn set_min_value(&self, value: f64) { unsafe { ffi::gtk_level_bar_set_min_value(self.to_glib_none().0, value); } } #[cfg(feature = "v3_6")] fn set_mode(&self, mode: LevelBarMode) { unsafe { ffi::gtk_level_bar_set_mode(self.to_glib_none().0, mode.to_glib()); } } #[cfg(feature = "v3_6")] fn set_value(&self, value: f64) { unsafe { ffi::gtk_level_bar_set_value(self.to_glib_none().0, value); } } #[cfg(feature = "v3_6")] fn connect_offset_changed<F: Fn(&Self, &str) + 'static>(&self, f: F) -> u64 { unsafe { let f: Box_<Box_<Fn(&Self, &str) + 'static>> = Box_::new(Box_::new(f)); connect(self.to_glib_none().0, "offset-changed", transmute(offset_changed_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _) } } } #[cfg(feature = "v3_6")] unsafe extern "C" fn offset_changed_trampoline<P>(this: *mut ffi::GtkLevelBar, name: *mut libc::c_char, f: glib_ffi::gpointer) where P: IsA<LevelBar> { callback_guard!(); let f: &Box_<Fn(&P, &str) + 'static> = transmute(f); f(&LevelBar::from_glib_none(this).downcast_unchecked(), &String::from_glib_none(name)) }<|fim▁end|>
#[cfg(feature = "v3_6")] fn get_offset_value<'a, P: Into<Option<&'a str>>>(&self, name: P) -> Option<f64>;
<|file_name|>ContainerTurboFurnace.java<|end_file_name|><|fim▁begin|>package com.projectreddog.machinemod.container; import com.projectreddog.machinemod.inventory.SlotBlazePowder; import com.projectreddog.machinemod.inventory.SlotNotBlazePowder; import com.projectreddog.machinemod.inventory.SlotOutputOnlyTurobFurnace; import com.projectreddog.machinemod.tileentities.TileEntityTurboFurnace; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.InventoryPlayer; import net.minecraft.inventory.Container; import net.minecraft.inventory.IContainerListener; import net.minecraft.inventory.Slot; import net.minecraft.item.ItemStack; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; ; public class ContainerTurboFurnace extends Container { protected TileEntityTurboFurnace turbofurnace; protected int lastFuleBurnTimeRemaining = 0; protected int lastProcessingTimeRemaining = 0; public ContainerTurboFurnace(InventoryPlayer inventoryPlayer, TileEntityTurboFurnace turbofurnace) { this.turbofurnace = turbofurnace; lastFuleBurnTimeRemaining = -1; lastProcessingTimeRemaining = -1; // for (int i = 0; i < 1; i++) { // for (int j = 0; j < 3; j++) { addSlotToContainer(new SlotNotBlazePowder(turbofurnace, 0, 47, 34)); addSlotToContainer(new SlotOutputOnlyTurobFurnace(inventoryPlayer.player, turbofurnace, 1, 110, 53)); addSlotToContainer(new SlotBlazePowder(turbofurnace, 2, 47, 74)); // } // } // commonly used vanilla code that adds the player's inventory bindPlayerInventory(inventoryPlayer); } @Override public boolean canInteractWith(EntityPlayer player) { return turbofurnace.isUsableByPlayer(player); } protected void bindPlayerInventory(InventoryPlayer inventoryPlayer) { for (int i = 0; i < 3; i++) { for (int j = 0; j < 9; j++) { addSlotToContainer(new Slot(inventoryPlayer, j + i * 9 + 9, 8 + j * 18, 139 + i * 18)); } } for (int i = 0; i < 9; i++) { addSlotToContainer(new Slot(inventoryPlayer, i, 8 + i * 18, 197)); } } @Override public ItemStack transferStackInSlot(EntityPlayer player, int slot) { ItemStack stack = ItemStack.EMPTY; Slot slotObject = (Slot) inventorySlots.get(slot); <|fim▁hole|> ItemStack stackInSlot = slotObject.getStack(); stack = stackInSlot.copy(); // merges the item into player inventory since its in the Entity if (slot < 3) { if (!this.mergeItemStack(stackInSlot, 3, this.inventorySlots.size(), true)) { return ItemStack.EMPTY; } slotObject.onSlotChange(stackInSlot, stack); } // places it into the tileEntity is possible since its in the player // inventory else if (!this.mergeItemStack(stackInSlot, 0, 3, false)) { return ItemStack.EMPTY; } if (stackInSlot.getCount() == 0) { slotObject.putStack(ItemStack.EMPTY); } else { slotObject.onSlotChanged(); } if (stackInSlot.getCount() == stack.getCount()) { return ItemStack.EMPTY; } slotObject.onTake(player, stackInSlot); } return stack; } /** * Looks for changes made in the container, sends them to every listener. 
*/ public void detectAndSendChanges() { super.detectAndSendChanges(); for (int i = 0; i < this.listeners.size(); ++i) { IContainerListener icrafting = (IContainerListener) this.listeners.get(i); if (this.lastFuleBurnTimeRemaining != this.turbofurnace.getField(0)) { icrafting.sendWindowProperty(this, 0, this.turbofurnace.getField(0)); } if (this.lastProcessingTimeRemaining != this.turbofurnace.getField(1)) { icrafting.sendWindowProperty(this, 1, this.turbofurnace.getField(1)); } } this.lastFuleBurnTimeRemaining = this.turbofurnace.getField(0); this.lastProcessingTimeRemaining = this.turbofurnace.getField(1); } @SideOnly(Side.CLIENT) public void updateProgressBar(int id, int data) { this.turbofurnace.setField(id, data); } }<|fim▁end|>
        // null checks and checks if the item can be stacked (maxStackSize > 1)
        if (slotObject != null && slotObject.getHasStack()) {
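transferStackInSlot in the record above implements the usual shift-click rule: slots 0-2 belong to the machine, everything after is the player inventory, and a clicked stack is merged into whichever side it is not on. A toy model with stacks reduced to plain strings; the real method also handles partial merges and stack limits, which are omitted here.

MACHINE_SLOTS = 3

def transfer_stack(slots, index):
    stack = slots[index]
    if stack is None:
        return None  # mirrors returning ItemStack.EMPTY
    if index < MACHINE_SLOTS:
        targets = range(MACHINE_SLOTS, len(slots))  # machine -> player
    else:
        targets = range(0, MACHINE_SLOTS)           # player -> machine
    for t in targets:
        if slots[t] is None:
            slots[t], slots[index] = stack, None
            return stack
    return None  # no free slot on the other side: transfer fails

inv = ["ore", None, None, None, "fuel"]
assert transfer_stack(inv, 0) == "ore" and inv[3] == "ore"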
<|file_name|>typeinference.js<|end_file_name|><|fim▁begin|>(function (ns) { // dependencies var assert = require('assert'); var esgraph = require('esgraph'); var worklist = require('analyses'); var common = require("../../base/common.js"); var Context = require("../../base/context.js"); var Base = require("../../base/index.js"); var codegen = require('escodegen'); var annotateRight = require("./infer_expression.js"); var InferenceScope = require("./registry/").InferenceScope; var System = require("./registry/system.js"); var Annotations = require("./../../type-system/annotation.js"); var walk = require('estraverse'); var Tools = require("../settools.js"); var Shade = require("../../interfaces.js"); var walkes = require('walkes'); var validator = require('../validator'); var TypeInfo = require("../../type-system/typeinfo.js").TypeInfo; // shortcuts var Syntax = common.Syntax; var Set = worklist.Set; var FunctionAnnotation = Annotations.FunctionAnnotation; var ANNO = Annotations.ANNO;<|fim▁hole|> function findConstantsFor(ast, names, constantVariables) { var result = new Set(), annotation, name, formerValue; constantVariables = constantVariables ? constantVariables.values() : []; walkes(ast, { AssignmentExpression: function(recurse) { if (this.left.type != Syntax.Identifier) { Shade.throwError(ast, "Can't find constant for computed left expression"); } name = this.left.name; if(names.has(name)) { annotation = ANNO(this.right); if(annotation.hasConstantValue()) { switch(this.operator) { case "=": result.add({ name: name, constant: TypeInfo.copyStaticValue(annotation)}); break; case "-=": case "+=": case "*=": case "/=": formerValue = constantVariables.filter(function(v){ return v.name == name; }); if(formerValue.length) { var c = formerValue[0].constant, v; switch(this.operator) { case "+=": v = c + TypeInfo.copyStaticValue(annotation); break; case "-=": v = c - TypeInfo.copyStaticValue(annotation); break; case "*=": v = c * TypeInfo.copyStaticValue(annotation); break; case "/=": v = c / TypeInfo.copyStaticValue(annotation); break; } result.add({ name: name, constant: v}); } break; default: assert(!this.operator); } } } recurse(this.right); }, VariableDeclarator: function(recurse) { name = this.id.name; if (this.init && names.has(name)) { annotation = ANNO(this.init); if(annotation.hasConstantValue()) { result.add({ name: name, constant: TypeInfo.copyStaticValue(annotation)}); } } recurse(this.init); }, UpdateExpression: function(recurse) { if(this.argument.type == Syntax.Identifier) { name = this.argument.name; annotation = ANNO(this); if(annotation.hasConstantValue()) { var value = TypeInfo.copyStaticValue(annotation); if (!this.prefix) { value = this.operator == "--" ? 
--value : ++value; } result.add({ name: name, constant: value}); } } } }); return result; } /** * * @param ast * @param {AnalysisContext} context * @param {*} opt * @constructor */ var TypeInference = function (ast, context, opt) { opt = opt || {}; this.context = context; this.propagateConstants = opt.propagateConstants || false; }; Base.extend(TypeInference.prototype, { /** * @param {*} ast * @param {*} opt * @returns {*} */ inferBody: function (ast, opt) { var cfg = esgraph(ast, { omitExceptions: true }), context = this.context, propagateConstants = this.propagateConstants; //console.log("infer body", cfg) var result = worklist(cfg, /** * @param {Set} input * @this {FlowNode} * @returns {*} */ function (input) { if (!this.astNode || this.type) // Start and end node do not influence the result return input; //console.log("Analyze", codegen.generate(this.astNode), this.astNode.type); // Local if(propagateConstants) { this.kill = this.kill || Tools.findVariableAssignments(this.astNode, true); } annotateRight(context, this.astNode, propagateConstants ? input : null ); this.decl = this.decl || context.declare(this.astNode); //context.computeConstants(this.astNode, input); if(!propagateConstants) { return input; } var filteredInput = null, generate = null; if (this.kill.size) { // Only if there's an assignment, we need to generate generate = findConstantsFor(this.astNode, this.kill, propagateConstants ? input : null); var that = this; filteredInput = new Set(input.filter(function (elem) { return !that.kill.some(function(tokill) { return elem.name == tokill }); })); } var result = Set.union(filteredInput || input, generate); // console.log("input:", input); // console.log("kill:", this.kill); // console.log("generate:", generate); // console.log("filteredInput:", filteredInput); // console.log("result:", result); return result; } , { direction: 'forward', merge: worklist.merge(function(a,b) { if (!a && !b) return null; //console.log("Merge", a && a.values(), b && b.values()) var result = Set.intersect(a, b); //console.log("Result", result && result.values()) return result; }) }); //Tools.printMap(result, cfg); return ast; } }); /** * * @param ast * @param {AnalysisContext} context * @param opt * @returns {*} */ var inferProgram = function (ast, context, opt) { opt = opt || {}; //var globalScope = createGlobalScope(ast); //registerSystemInformation(globalScope, opt); var typeInference = new TypeInference(ast, context, opt); var result = typeInference.inferBody(ast, opt); return result; }; ns.infer = inferProgram; }(exports));<|fim▁end|>
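The worklist pass in the record above merges dataflow facts with Set.intersect, so a variable survives as a known constant only when every incoming control-flow path assigns it the same value. A tiny standalone version of that lattice operation, with None standing in for "no fact yet" as in the JavaScript merge callback, and the sets of {name, constant} pairs modelled as a dict:

def merge_constants(a, b):
    if a is None:
        return b
    if b is None:
        return a
    # keep a fact only if both paths agree on the value
    return {name: value for name, value in a.items() if b.get(name) == value}

then_branch = {"x": 1, "y": 2}
else_branch = {"x": 1, "y": 3}
assert merge_constants(then_branch, else_branch) == {"x": 1}  # y is not constant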
<|file_name|>abstractpattern.js<|end_file_name|><|fim▁begin|>// Copyright 2007 The Closure Library Authors. All Rights Reserved. //<|fim▁hole|>// // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview DOM pattern base class. * * @author [email protected] (Robby Walker) */ goog.provide('goog.dom.pattern.AbstractPattern'); goog.require('goog.dom.TagWalkType'); goog.require('goog.dom.pattern.MatchType'); /** * Base pattern class for DOM matching. * * @constructor */ goog.dom.pattern.AbstractPattern = function() { /** * The first node matched by this pattern. * @type {Node} */ this.matchedNode = null; }; /** * Reset any internal state this pattern keeps. */ goog.dom.pattern.AbstractPattern.prototype.reset = function() { // The base implementation does nothing. }; /** * Test whether this pattern matches the given token. * * @param {Node} token Token to match against. * @param {goog.dom.TagWalkType} type The type of token. * @return {goog.dom.pattern.MatchType} `MATCH` if the pattern matches. */ goog.dom.pattern.AbstractPattern.prototype.matchToken = function(token, type) { return goog.dom.pattern.MatchType.NO_MATCH; };<|fim▁end|>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
<|file_name|>create-item.js<|end_file_name|><|fim▁begin|>import { Application } from 'spectron'; import electron from 'electron-prebuilt'; import { assert } from 'chai'; import MockServer from '../util/mock-server'; import items from '../test-cases/items'; import models from '../test-cases/models'; describe('Creating an Item', function () { this.timeout(10000); let app; let mockServer = new MockServer(); before(() => { app = new Application({ path: electron, args: ['index.js', '--port=8080'] }); return app.start().then(() => { return mockServer.start(8080); }); }); beforeEach(() => { mockServer.clearExpectations(); }); it('creates a new item', () => { mockServer.expect({ method: 'get', endpoint: 'item/all', response: { status: 'success', data: { items: items.slice(0, 3) } } }); mockServer.expect({ method: 'get', endpoint: 'model/all', response: { status: 'success', data: { models } } }); mockServer.expect({ method: 'get', endpoint: 'model/all', response: { status: 'success', data: { models } } }); mockServer.expect({ method: 'post', endpoint: 'item', json: { modelAddress: models[0].address }, response: { status: 'success', data: { address: 'iGwEZVvgu', modelName: 'Resistor' } } }); models[0].count ++; return app.client.click('#view-items').then(() => { return app.client.waitForVisible('#items', 5000); }).then(() => { return app.client.click('#items button'); }).then(() => { return app.client.waitForVisible('.create-item-form', 1000); }).then(() => { return app.client.getValue('.create-item-form select option'); }).then(vals => { assert.include(vals, models[0].address); assert.include(vals, models[1].address); return app.client.selectByValue('.create-item-form select', models[0].address); }).then(() => { return app.client.submitForm('.create-item-form form'); }).then(() => { return app.client.waitForVisible('.toast', 2500); }).then(() => { return app.client.getText('.toast'); }).then(text => { assert.strictEqual(text, 'New item added: Resistor (iGwEZVvgu)');<|fim▁hole|> return app.client.elements('#items .item'); }).then(elements => { assert.lengthOf(elements.value, 3); mockServer.validate(); }); }); it('tells the user to select a model', () => { mockServer.expect({ method: 'get', endpoint: 'item/all', response: { status: 'success', data: { items: items.slice(0, 4) } } }); mockServer.expect({ method: 'get', endpoint: 'model/all', response: { status: 'success', data: { models } } }); mockServer.expect({ method: 'get', endpoint: 'model/all', response: { status: 'success', data: { models } } }); mockServer.expect({ method: 'post', endpoint: 'item', json: { modelAddress: models[1].address }, response: { status: 'success', data: { address: 'iGwEZW6nn', modelName: 'Transistor' } } }); models[1].count ++; return app.client.click('#omnibar img').then(() => { return app.client.click('#view-items'); }).then(() => { return app.client.waitForVisible('#items', 5000); }).then(() => { return app.client.click('#items button'); }).then(() => { return app.client.waitForVisible('.create-item-form', 1000); }).then(() => { return app.client.submitForm('.create-item-form form'); }).then(() => { return app.client.waitForVisible('.toast', 1000); }).then(() => { return app.client.getText('.toast'); }).then(text => { assert.strictEqual(text, 'Please select a model.'); return app.client.click('.toast'); }).then(() => { return app.client.waitForVisible('.toast', 10000, true); }).then(() => { return app.client.getValue('.create-item-form select option'); }).then(vals => { assert.include(vals, models[0].address); 
assert.include(vals, models[1].address); return app.client.selectByValue('.create-item-form select', models[1].address); }).then(() => { return app.client.submitForm('.create-item-form form'); }).then(() => { return app.client.waitForVisible('.toast', 2500); }).then(() => { return app.client.getText('.toast'); }).then(text => { assert.strictEqual(text, 'New item added: Transistor (iGwEZW6nn)'); return app.client.click('.toast'); }).then(() => { return app.client.waitForVisible('.toast', 10000, true); }).then(() => { return app.client.elements('#items .item'); }).then(elements => { assert.lengthOf(elements.value, 4); mockServer.validate(); }); }); it('creates another new item', () => { mockServer.expect({ method: 'get', endpoint: 'model/all', response: { status: 'success', data: { models } } }); mockServer.expect({ method: 'get', endpoint: 'model/all', response: { status: 'success', data: { models } } }); mockServer.expect({ method: 'post', endpoint: 'item', json: { modelAddress: models[1].address }, response: { status: 'success', data: { address: 'iGwEZWXhn', modelName: 'Transistor' } } }); models[1].count ++; return app.client.waitForVisible('#items', 5000).then(() => { return app.client.click('#items button'); }).then(() => { return app.client.waitForVisible('.create-item-form', 1000); }).then(() => { return app.client.getValue('.create-item-form select option'); }).then(vals => { assert.include(vals, models[0].address); assert.include(vals, models[1].address); return app.client.selectByValue('.create-item-form select', models[1].address); }).then(() => { return app.client.submitForm('.create-item-form form'); }).then(() => { return app.client.waitForVisible('.toast', 2500); }).then(() => { return app.client.getText('.toast'); }).then(text => { assert.strictEqual(text, 'New item added: Transistor (iGwEZWXhn)'); return app.client.click('.toast'); }).then(() => { return app.client.waitForVisible('.toast', 10000, true); }).then(() => { return app.client.elements('#items .item'); }).then(elements => { assert.lengthOf(elements.value, 4); mockServer.validate(); }); }); after(() => { if (app && app.isRunning()) { return app.stop().then(() => { return mockServer.stop(); }); } }); });<|fim▁end|>
return app.client.click('.toast'); }).then(() => { return app.client.waitForVisible('.toast', 10000, true); }).then(() => {
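The MockServer driving the test above is used as an expectation queue: the test enqueues the exact requests it intends to trigger, and validate() fails if any expectation was never consumed. The class itself lives in ../util/mock-server and is not shown, so the following is an inferred sketch of that contract, not its actual implementation.

class MockServer:
    def __init__(self):
        self.expectations = []

    def expect(self, method, endpoint, response):
        self.expectations.append((method, endpoint, response))

    def handle(self, method, endpoint):
        # consume expectations in order; an unexpected request fails fast
        expected = self.expectations.pop(0)
        assert (method, endpoint) == expected[:2], "unexpected request"
        return expected[2]

    def validate(self):
        assert not self.expectations, "expected requests never arrived"

server = MockServer()
server.expect('get', 'item/all', {'status': 'success'})
assert server.handle('get', 'item/all') == {'status': 'success'}
server.validate()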
<|file_name|>ConcurrentClientsTest.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html */ package org.opendaylight.controller.netconf.impl; import static com.google.common.base.Preconditions.checkNotNull; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anySetOf; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.ByteStreams; import io.netty.channel.ChannelFuture; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.util.HashedWheelTimer; import io.netty.util.concurrent.GlobalEventExecutor; import java.io.DataOutputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.net.InetSocketAddress; import java.net.Socket; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicLong; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.opendaylight.controller.config.util.capability.Capability; import org.opendaylight.controller.config.util.xml.DocumentedException; import org.opendaylight.controller.config.util.xml.XmlUtil; import org.opendaylight.controller.netconf.api.NetconfMessage; import org.opendaylight.controller.netconf.api.monitoring.CapabilityListener; import org.opendaylight.controller.netconf.api.monitoring.NetconfMonitoringService; import org.opendaylight.controller.netconf.api.xml.XmlNetconfConstants; import org.opendaylight.controller.netconf.client.NetconfClientDispatcher; import org.opendaylight.controller.netconf.client.NetconfClientDispatcherImpl; import org.opendaylight.controller.netconf.client.SimpleNetconfClientSessionListener; import org.opendaylight.controller.netconf.client.TestingNetconfClient; import org.opendaylight.controller.netconf.client.conf.NetconfClientConfiguration; import org.opendaylight.controller.netconf.client.conf.NetconfClientConfigurationBuilder; import org.opendaylight.controller.netconf.impl.osgi.AggregatedNetconfOperationServiceFactory; import org.opendaylight.controller.netconf.mapping.api.HandlingPriority; import org.opendaylight.controller.netconf.mapping.api.NetconfOperation; import org.opendaylight.controller.netconf.mapping.api.NetconfOperationChainedExecution; import org.opendaylight.controller.netconf.mapping.api.NetconfOperationService; import org.opendaylight.controller.netconf.mapping.api.NetconfOperationServiceFactory; import org.opendaylight.controller.netconf.nettyutil.handler.exi.NetconfStartExiMessage; import 
org.opendaylight.controller.netconf.util.messages.NetconfHelloMessageAdditionalHeader; import org.opendaylight.controller.netconf.util.messages.NetconfMessageUtil; import org.opendaylight.controller.netconf.util.test.XmlFileLoader; import org.opendaylight.protocol.framework.NeverReconnectStrategy; import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.inet.types.rev100924.Uri; import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.netconf.state.CapabilitiesBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; @RunWith(Parameterized.class) public class ConcurrentClientsTest { private static final Logger LOG = LoggerFactory.getLogger(ConcurrentClientsTest.class); private static ExecutorService clientExecutor; private static final int CONCURRENCY = 32; private static final InetSocketAddress netconfAddress = new InetSocketAddress("127.0.0.1", 8303); private int nettyThreads; private Class<? extends Runnable> clientRunnable; private Set<String> serverCaps; public ConcurrentClientsTest(int nettyThreads, Class<? extends Runnable> clientRunnable, Set<String> serverCaps) { this.nettyThreads = nettyThreads; this.clientRunnable = clientRunnable; this.serverCaps = serverCaps; } @Parameterized.Parameters() public static Collection<Object[]> data() { return Arrays.asList(new Object[][]{{4, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES}, {1, TestingNetconfClientRunnable.class, NetconfServerSessionNegotiatorFactory.DEFAULT_BASE_CAPABILITIES}, // empty set of capabilities = only base 1.0 netconf capability {4, TestingNetconfClientRunnable.class, Collections.emptySet()}, {4, TestingNetconfClientRunnable.class, getOnlyExiServerCaps()}, {4, TestingNetconfClientRunnable.class, getOnlyChunkServerCaps()}, {4, BlockingClientRunnable.class, getOnlyExiServerCaps()}, {1, BlockingClientRunnable.class, getOnlyExiServerCaps()}, }); } private EventLoopGroup nettyGroup; private NetconfClientDispatcher netconfClientDispatcher; HashedWheelTimer hashedWheelTimer; private TestingNetconfOperation testingNetconfOperation; public static NetconfMonitoringService createMockedMonitoringService() { NetconfMonitoringService monitoring = mock(NetconfMonitoringService.class); doNothing().when(monitoring).onSessionUp(any(NetconfServerSession.class)); doNothing().when(monitoring).onSessionDown(any(NetconfServerSession.class)); doReturn(new AutoCloseable() { @Override public void close() throws Exception { } }).when(monitoring).registerListener(any(NetconfMonitoringService.MonitoringListener.class)); doNothing().when(monitoring).onCapabilitiesChanged(anySetOf(Capability.class), anySetOf(Capability.class)); doReturn(new CapabilitiesBuilder().setCapability(Collections.<Uri>emptyList()).build()).when(monitoring).getCapabilities(); return monitoring; } @BeforeClass public static void setUpClientExecutor() { clientExecutor = Executors.newFixedThreadPool(CONCURRENCY, new ThreadFactory() { int i = 1; @Override public Thread newThread(final Runnable r) { Thread thread = new Thread(r); thread.setName("client-" + i++); thread.setDaemon(true); return thread; } }); } @Before public void setUp() throws Exception { hashedWheelTimer = new HashedWheelTimer(); nettyGroup = new NioEventLoopGroup(nettyThreads); netconfClientDispatcher = new NetconfClientDispatcherImpl(nettyGroup, nettyGroup, hashedWheelTimer); AggregatedNetconfOperationServiceFactory factoriesListener = new 
AggregatedNetconfOperationServiceFactory(); testingNetconfOperation = new TestingNetconfOperation(); factoriesListener.onAddNetconfOperationServiceFactory(new TestingOperationServiceFactory(testingNetconfOperation)); SessionIdProvider idProvider = new SessionIdProvider(); NetconfServerSessionNegotiatorFactory serverNegotiatorFactory = new NetconfServerSessionNegotiatorFactory( hashedWheelTimer, factoriesListener, idProvider, 5000, createMockedMonitoringService(), serverCaps); NetconfServerDispatcherImpl.ServerChannelInitializer serverChannelInitializer = new NetconfServerDispatcherImpl.ServerChannelInitializer(serverNegotiatorFactory); final NetconfServerDispatcherImpl dispatch = new NetconfServerDispatcherImpl(serverChannelInitializer, nettyGroup, nettyGroup); ChannelFuture s = dispatch.createServer(netconfAddress); s.await(); } @After public void tearDown(){ hashedWheelTimer.stop(); try { nettyGroup.shutdownGracefully().get(); } catch (InterruptedException | ExecutionException e) { LOG.warn("Ignoring exception while cleaning up after test", e); } } @AfterClass public static void tearDownClientExecutor() { clientExecutor.shutdownNow(); } @Test(timeout = CONCURRENCY * 1000) public void testConcurrentClients() throws Exception { List<Future<?>> futures = Lists.newArrayListWithCapacity(CONCURRENCY); for (int i = 0; i < CONCURRENCY; i++) { futures.add(clientExecutor.submit(getInstanceOfClientRunnable())); } for (Future<?> future : futures) { try { future.get(); } catch (InterruptedException e) { throw new IllegalStateException(e); } catch (ExecutionException e) { LOG.error("Thread for testing client failed", e); fail("Client failed: " + e.getMessage()); } } assertEquals(CONCURRENCY, testingNetconfOperation.getMessageCount()); } public static Set<String> getOnlyExiServerCaps() { return Sets.newHashSet( XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_BASE_1_0, XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_CAPABILITY_EXI_1_0 ); } public static Set<String> getOnlyChunkServerCaps() { return Sets.newHashSet( XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_BASE_1_0, XmlNetconfConstants.URN_IETF_PARAMS_NETCONF_BASE_1_1 ); } public Runnable getInstanceOfClientRunnable() throws Exception { return clientRunnable.getConstructor(ConcurrentClientsTest.class).newInstance(this); } /** * Responds to all operations except start-exi and counts all requests */ private static class TestingNetconfOperation implements NetconfOperation { private final AtomicLong counter = new AtomicLong(); @Override public HandlingPriority canHandle(Document message) { return XmlUtil.toString(message).contains(NetconfStartExiMessage.START_EXI) ? HandlingPriority.CANNOT_HANDLE : HandlingPriority.HANDLE_WITH_MAX_PRIORITY; } @Override public Document handle(Document requestMessage, NetconfOperationChainedExecution subsequentOperation) throws DocumentedException { try { LOG.info("Handling netconf message from test {}", XmlUtil.toString(requestMessage)); counter.getAndIncrement(); return XmlUtil.readXmlToDocument("<test/>"); } catch (Exception e) { throw new RuntimeException(e); } } public long getMessageCount() { return counter.get(); } } /** * Hardcoded operation service factory */ private static class TestingOperationServiceFactory implements NetconfOperationServiceFactory { private final NetconfOperation[] operations; public TestingOperationServiceFactory(final NetconfOperation... 
operations) { this.operations = operations; } @Override public Set<Capability> getCapabilities() { return Collections.emptySet(); } @Override public AutoCloseable registerCapabilityListener(final CapabilityListener listener) { return new AutoCloseable(){ @Override public void close() throws Exception {} }; } @Override public NetconfOperationService createService(String netconfSessionIdForReporting) { return new NetconfOperationService() { @Override public Set<NetconfOperation> getNetconfOperations() { return Sets.newHashSet(operations); } @Override public void close() {} }; } } /** * Pure socket based blocking client */ public final class BlockingClientRunnable implements Runnable { @Override public void run() { try { run2(); } catch (Exception e) { throw new IllegalStateException(Thread.currentThread().getName(), e); } } private void run2() throws Exception { InputStream clientHello = checkNotNull(XmlFileLoader .getResourceAsStream("netconfMessages/client_hello.xml")); InputStream getConfig = checkNotNull(XmlFileLoader.getResourceAsStream("netconfMessages/getConfig.xml")); Socket clientSocket = new Socket(netconfAddress.getHostString(), netconfAddress.getPort()); DataOutputStream outToServer = new DataOutputStream(clientSocket.getOutputStream()); InputStreamReader inFromServer = new InputStreamReader(clientSocket.getInputStream()); StringBuffer sb = new StringBuffer(); while (sb.toString().endsWith("]]>]]>") == false) { sb.append((char) inFromServer.read()); } LOG.info(sb.toString()); outToServer.write(ByteStreams.toByteArray(clientHello)); outToServer.write("]]>]]>".getBytes()); outToServer.flush(); // Thread.sleep(100); outToServer.write(ByteStreams.toByteArray(getConfig)); outToServer.write("]]>]]>".getBytes()); outToServer.flush(); Thread.sleep(100); sb = new StringBuffer(); while (sb.toString().endsWith("]]>]]>") == false) { sb.append((char) inFromServer.read()); } LOG.info(sb.toString()); clientSocket.close(); } } /** * TestingNetconfClient based runnable */ public final class TestingNetconfClientRunnable implements Runnable { @Override public void run() { try { final TestingNetconfClient netconfClient = new TestingNetconfClient(Thread.currentThread().getName(), netconfClientDispatcher, getClientConfig());<|fim▁hole|> .xmlFileToNetconfMessage("netconfMessages/getConfig.xml"); NetconfMessage result = netconfClient.sendRequest(getMessage).get(); LOG.info("Client with session id {}: got result {}", sessionId, result); Preconditions.checkState(NetconfMessageUtil.isErrorMessage(result) == false, "Received error response: " + XmlUtil.toString(result.getDocument()) + " to request: " + XmlUtil.toString(getMessage.getDocument())); netconfClient.close(); LOG.info("Client with session id {}: ended", sessionId); } catch (final Exception e) { throw new IllegalStateException(Thread.currentThread().getName(), e); } } private NetconfClientConfiguration getClientConfig() { final NetconfClientConfigurationBuilder b = NetconfClientConfigurationBuilder.create(); b.withAddress(netconfAddress); b.withAdditionalHeader(new NetconfHelloMessageAdditionalHeader("uname", "10.10.10.1", "830", "tcp", "client")); b.withSessionListener(new SimpleNetconfClientSessionListener()); b.withReconnectStrategy(new NeverReconnectStrategy(GlobalEventExecutor.INSTANCE, NetconfClientConfigurationBuilder.DEFAULT_CONNECTION_TIMEOUT_MILLIS)); return b.build(); } } }<|fim▁end|>
                long sessionId = netconfClient.getSessionId();
                LOG.info("Client with session id {}: hello exchanged", sessionId);
                final NetconfMessage getMessage = XmlFileLoader
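The test above reduces to a simple shape: run CONCURRENCY clients in a pool, let each perform one request/response round trip, propagate any client failure, and then assert that the server-side counter matches. The same shape in Python, with a lock-guarded integer standing in for TestingNetconfOperation's AtomicLong:

import threading
from concurrent.futures import ThreadPoolExecutor

CONCURRENCY = 32
counter = 0
counter_lock = threading.Lock()

def client_round_trip():
    global counter
    # a real client would connect and exchange messages here
    with counter_lock:
        counter += 1

with ThreadPoolExecutor(max_workers=CONCURRENCY) as pool:
    futures = [pool.submit(client_round_trip) for _ in range(CONCURRENCY)]
    for future in futures:
        future.result()  # re-raises client failures, like future.get() in the test
assert counter == CONCURRENCY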
<|file_name|>events.py<|end_file_name|><|fim▁begin|>from openstates.utils import LXMLMixin import datetime as dt from pupa.scrape import Scraper, Event from .utils import get_short_codes from requests import HTTPError import pytz URL = "http://www.capitol.hawaii.gov/upcominghearings.aspx" class HIEventScraper(Scraper, LXMLMixin): def get_related_bills(self, href): ret = [] try: page = self.lxmlize(href) except HTTPError: return ret bills = page.xpath(".//a[contains(@href, 'Bills')]")<|fim▁hole|> except StopIteration: continue tds = row.xpath("./td") descr = tds[1].text_content() for i in ['\r\n', '\xa0']: descr = descr.replace(i, '') ret.append({"bill_id": bill.text_content(), "type": "consideration", "descr": descr}) return ret def scrape(self): tz = pytz.timezone("US/Eastern") get_short_codes(self) page = self.lxmlize(URL) table = page.xpath( "//table[@id='ctl00_ContentPlaceHolderCol1_GridView1']")[0] for event in table.xpath(".//tr")[1:]: tds = event.xpath("./td") committee = tds[0].text_content().strip() descr = [x.text_content() for x in tds[1].xpath(".//span")] if len(descr) != 1: raise Exception descr = descr[0].replace('.', '').strip() when = tds[2].text_content().strip() where = tds[3].text_content().strip() notice = tds[4].xpath(".//a")[0] notice_href = notice.attrib['href'] notice_name = notice.text when = dt.datetime.strptime(when, "%m/%d/%Y %I:%M %p") when = pytz.utc.localize(when) event = Event(name=descr, start_time=when, classification='committee-meeting', description=descr, location_name=where, timezone=tz.zone) if "/" in committee: committees = committee.split("/") else: committees = [committee] for committee in committees: if "INFO" not in committee: committee = self.short_ids.get("committee", {"chamber": "unknown", "name": committee}) else: committee = { "chamber": "joint", "name": committee, } event.add_committee(committee['name'], note='host') event.add_source(URL) event.add_document(notice_name, notice_href, media_type='text/html') for bill in self.get_related_bills(notice_href): a = event.add_agenda_item(description=bill['descr']) a.add_bill( bill['bill_id'], note=bill['type'] ) yield event<|fim▁end|>
        for bill in bills:
            try:
                row = next(bill.iterancestors(tag='tr'))
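The completed hole above climbs from each matched <a> element to its enclosing table row; next() raises StopIteration when no <tr> ancestor exists, and the except clause turns that into "skip this link". The same shape with lxml; the HTML snippet is invented for illustration.

from lxml import html

doc = html.fromstring("<table><tr><td><a href='Bills/HB1'>HB1</a></td></tr></table>")
for link in doc.xpath(".//a[contains(@href, 'Bills')]"):
    try:
        row = next(link.iterancestors(tag='tr'))
    except StopIteration:
        continue  # link not inside a table row
    print(row.tag)  # -> tr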
<|file_name|>structs.go<|end_file_name|><|fim▁begin|>package log type logLevel struct { Level int Prefix string ColorFunc func(...interface{}) string } type logLevels []*logLevel func (l *logLevels) getFunc(Level int) func(...interface{}) string { level := l.getLevel(Level) if level != nil { return level.ColorFunc }<|fim▁hole|>} func (l *logLevels) getLevel(Level int) *logLevel { for _, item := range *l { if item.Level == Level { return item } } return nil }<|fim▁end|>
return nil
<|file_name|>thermo.py<|end_file_name|><|fim▁begin|>import math import scipy.optimize as opt log = math.log exp = math.exp small = 1e-20 # unitless T0 = 1 # K Tcrit = 650 # K zero_C = 273.15 # K p0 = 1 # Pa atm = 101325 # Pa bar = 100000 # Pa # Tcrit is slightly above the critical point of water. This is used as an upperbound # on values of T that would be even vaguely physically reasonable for our thermodynamic # equations. Exact value here is unimportant. ##### # The following values are quoted exactly* from # CRC Handbook of Chemistry and Physics, 84th edition, 2003-2004, ISBN 0-8493-0484-9 # Comments specify units and section number in the source. # *changes in units like g -> kg are done silently ##### R = 8.314510 # J / mol / K 1-54 Mw_ = 0.01801528 # kg / mol 6-4 molecular weight of water # enthalpy of vaporization of water at specified temperature vap_T = zero_C # K 6-3 vap_enthalpy = 45054 # J / mol 6-3 # heat capacity and density of air (dry air?) at specified temperature and 1 bar pressure air_T = 300 # K 6-1 air_cp = 1007 # J / kg / K 6-1 air_rho = 1.161 # kg / m^3 6-1 # heat capacity of liquid water at specified temperature lw_T = 10 + zero_C # K 6-3 lw_cp = 4192.1 # J / kg / K 6-3 # saturation vapor pressure at specified temperature sat_T = 10 + zero_C # K 6-10 sat_p_star = 1228.1 # Pa 6-10 #### # End of CRC reference values ### # Old value of cv_ I was using is 37.47 J / mol / K. # New value is from the following source: # 1870 J / kg / K (or 33.68857 J / mol / K) # page 77 # Iribarne & Godson (Eds.) (2012). Atmospheric thermodynamics (Vol. 6). Springer Science & Business Media. # Derived values Md_ = air_rho * R * air_T / bar # kg / mol molecular weight of air cd_ = air_cp * Md_ # J / mol / K heat capacity of air, constant pressure cv_ = 1870 * Mw_ # J / mol / K heat capacity of water vapor, constant p cl_ = lw_cp * Mw_ # J / mol / K heat capacity of liquid water, constant p cd = cd_ / R # unitless cv = cv_ / R # unitless cl = cl_ / R # unitless Md = Md_ / R # kg K / J Mw = Mw_ / R # kg K / J epsilon = Mw_ / Md_ # unitless Lc = vap_enthalpy / R + (cl - cv) * vap_T # K Tc = sat_T # K pc = sat_p_star * exp(Lc / Tc) # Pa # Clausius-Clapeyron relation def compute_p_star(T): return pc * exp((cv - cl) * log(T / Tc) - Lc / T) def compute_y_s(p, p_star): return p_star / (p - p_star) def compute_y_s_from_T(p, T): return compute_y_s(p, compute_p_star(T)) def compute_ell(T): return cv - cl + Lc / T def compute_issat_ypT(y, p, T): y_s = compute_y_s_from_T(p, T) return (y_s > 0) and (y > y_s) # Correctness of this is non-trivial. def compute_issat_yps(y, p, s): return compute_issat_ypT(y, p, compute_T_unsat(y, p, s)) def compute_M(y): return Md * (1 + epsilon * y) def compute_Ms_unsat(y, p, T): if y < small: return cd * log(T / T0) - log(p / p0) else: return ((cd + y * cv) * log(T / T0) - (1 + y) * log(p / p0) + (1 + y) * log(1 + y) - y * log(y)) def compute_Ms_sat(y, p, T): p_star = compute_p_star(T) y_s = compute_y_s(p, p_star) ell = compute_ell(T) if y < small: # Unlikely to represent a physical situation, # since y > y_s for saturated parcels. 
return cd * log(T / T0) - log(p_star / p0) + log(y_s) + y_s * ell else: return ((cd + y * cv) * log(T / T0) - (1 + y) * log(p_star / p0) + log (y_s) + (y_s - y) * ell) def compute_T_unsat(y, p, s): Ms = compute_M(y) * s if y < small: return T0 * exp((Md * s + log(p / p0)) / cd) else: return T0 * exp( (Ms + (1 + y) * log(p / p0) - (1 + y) * log(1 + y) + y * log(y)) / (cd + y * cv) ) # # For ease of writing this function and computation speed, we assume that the parcel # specified is saturated, that y > 1e-10, that p < 1e10 Pa, and that the parcel's temperature # is less than Tcrit. If any of these assumptions are violated this function may diverge, # throw an exception, or return a nonsense value. # # This function is the main bottleneck in speeding up the code. # def compute_T_sat(y, p, s): if y < 1e-10 or p > 1e10: raise ValueError() # # Equation we wish to solve: # M * s = (cd + y*cv) * log(T / T0) - (1 + y)*log(p_star / p0) + log(y_s) + (y_s - y) * ell # where # p_star is a function of T # y_s = p_star / (p - p_star) # ell = cv - cl + Lc / T # # Note that for T < Tcrit, ell > 0 and d p_star/dT > 0. # # Let # f(T) = c0 * log(T) - (1 + y) * log(p_star) + log(y_s) + (y_s - y) * ell + c1 # = c0 * log(T) - y * log(p_star) - log(p - p_star) + (y_s - y) * ell + c1 # = c0 * log(T) - y * ((cv - cl) log(T / Tc) - Lc / T) - log(p - p_star) # + y_s * ell - y * (cv - cl) - y * Lc / T + c1 - y * log(pc) # = c0 * log(T) - y * (cv - cl) * log(T) - log(p - p_star) # + y_s * ell + c2 # = c3 * log(T) - log(p - p_star) + y_s * ell + c2 # where # c0 = cd + y * cv # c1 = - (cd + y * cv) * log(T0) + (1 + y) * log(p0) - compute_M(y) * s # c2 = c1 - y * log(pc) - y * (cv - cl) + y * (cv - cl) * log(Tc) # c3 = cd + y * cl<|fim▁hole|> # Note that f(T) is increasing in T for reasonable values of p and T. We want to find # where f(T) = 0. # c1 = - (cd + y * cv) * log(T0) + (1 + y) * log(p0) - compute_M(y) * s c2 = c1 - y * log(pc) - y * (cv - cl) + y * (cv - cl) * log(Tc) c3 = cd + y * cl # # Since the parcel is saturated we know that y_s < y, so # p_star = p (y_s / (1 + y_s)) = p (1 - 1 / (1 + y_s)) < p (1 - 1 / (1 + y)) # so we have an upperbound on the value of p_star. Furthermore, since cv - cl < 0, # p_star = pc exp((cv - cl) log(T / Tc) - Lc / T) # > pc exp((cv - cl) log(Tcrit / Tc) - Lc / T) # so # -Lc / T < log(p_star / pc) + (cl - cv) log(Tcrit / Tc) # Lc / T > -log(p_star / pc) + (cv - cl) log(Tcrit / Tc) [1] # T < Lc / (-log(p_star / pc) + (cv - cl) log(Tcrit / Tc)) # T < Lc / (-log(p / pc) - log(y / (1 + y)) + (cv - cl) log(Tcrit / Tc)) # where we have used that the right side of [1] is positive for p_star smaller than 1e11 Pa # or so. # c4 = (cv - cl) * log(Tcrit / Tc) p_star_max = p * y / (1 + y) Tmax = Lc / (c4 - log(p_star_max / pc)) Tmax = min(Tmax, Tcrit) # Couldn't figure out a good way to lower bound it. 100 K is pretty safe. 
Tmin = 100 def f(T): p_star = compute_p_star(T) if p_star >= p_star_max: return T * 1.0e200 y_s = p_star / (p - p_star) ell = cv - cl + Lc / T return c3 * log(T) - log(p - p_star) + y_s * ell + c2 if f(Tmin) >= 0: return Tmin if f(Tmax) <= 0: return Tmax return opt.brentq(f, Tmin, Tmax) def compute_Tv_sat(y, p, s): T = compute_T_sat(y, p, s) y_s = compute_y_s_from_T(p, T) return T * (1 + y_s) / (1 + y * epsilon) def compute_Tv_unsat(y, p, s): return compute_T_unsat(y, p, s) * (1 + y) / (1 + y * epsilon) def compute_Mh_unsat(y, p, s): return (cd + y * cv) * compute_T_unsat(y, p, s) def compute_Mh_sat(y, p, s): T = compute_T_sat(y, p, s) y_s = compute_y_s_from_T(p, T) ell = compute_ell(T) return (cd + y * cv + (y_s - y) * ell) * T def compute_Mh_dp_unsat(y, p, s): return (1 + y) * compute_T_unsat(y, p, s) / p def compute_Mh_dp_sat(y, p, s): T = compute_T_sat(y, p, s) y_s = compute_y_s_from_T(p, T) return (1 + y_s) * T / p ############################## # # User-friendly thermodynamic functions with user-friendly names # ############################## # w is kg / kg def compute_w(y): return y * epsilon # y is mol / mol def compute_y(w): return w / epsilon # kg / mol def molecular_weight_water(): return Mw_ # kg / mol def molecular_weight_dry_air(): return Md_ # kg / mol def molecular_weight_moist_air(y): return (Md_ + y * Mw_) / (1 + y) # partial pressure of water vapor at the saturation point # Pa def saturation_vapor_pressure(T): return p_star(T) # unitless def relative_humidity(y, p, T): y_s = compute_y_s_from_T(p, T) if y > y_s: return 1 else: return y / y_s # J / mol def latent_heat_condensation(T): return compute_ell(T) * R * T # True or False def is_saturated(y, p, T): return compute_issat_ypT(y, p, T) # J / kg / K def entropy(y, p, T): if compute_issat_ypT(y, p, T): return compute_Ms_sat(y, p, T) / compute_M(y) else: return compute_Ms_unsat(y, p, T) / compute_M(y) # K def temperature(y, p, s): if compute_issat_yps(y, p, s): return compute_T_sat(y, p, s) else: return compute_T_unsat(y, p, s) # K def virtual_temperature(y, p, s): if compute_issat_yps(y, p, s): return compute_Tv_sat(y, p, s) else: return compute_Tv_unsat(y, p, s) # J / kg def enthalpy(y, p, s): if compute_issat_yps(y, p, s): return compute_Mh_sat(y, p, s) / compute_M(y) else: return compute_Mh_unsat(y, p, s) / compute_M(y) # J / kg / Pa = m^3 / kg, units of specific volume def enthalpy_dp(y, p, s): if compute_issat_yps(y, p, s): return compute_Mh_dp_sat(y, p, s) / compute_M(y) else: return compute_Mh_dp_unsat(y, p, s) / compute_M(y) # For a parcel moving from pold to pnew, given the old temperature, # compute the new temperature # K def new_temperature(y, Told, pold, pnew): return temperature(y, pnew, entropy(y, pold, Told)) # For a parcel moving from pold to pnew, given the old temperature, # compute the change in enthalpy # J / kg def change_in_enthalpy(y, Told, pold, pnew): s = entropy(y, Told, pold) return enthalpy(y, pnew, s) - enthalpy(y, pold, s)<|fim▁end|>
#
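compute_T_sat in the record above is careful to bracket the root before calling brentq: it derives an upper bound on T from the saturation curve, clamps it to Tcrit, uses a conservative 100 K lower bound, and short-circuits when f already has a single sign over the whole bracket. The same guard pattern on a toy monotone f; the real f couples p_star(T) and y_s, which is omitted here.

import scipy.optimize as opt

def solve_bracketed(f, lo, hi):
    if f(lo) >= 0:
        return lo       # root at or below the bracket
    if f(hi) <= 0:
        return hi       # root at or above the bracket
    return opt.brentq(f, lo, hi)

# toy stand-in for the entropy balance, with a root at T = 300
f = lambda T: T - 300.0
assert abs(solve_bracketed(f, 100.0, 650.0) - 300.0) < 1e-9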
<|file_name|>KEGGcompoundImport.java<|end_file_name|><|fim▁begin|>/* * Concept profile generation tool suite * Copyright (C) 2015 Biosemantics Group, Erasmus University Medical Center, * Rotterdam, The Netherlands * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/> */ package JochemBuilder.KEGGcompound; import org.erasmusmc.ontology.OntologyFileLoader; import org.erasmusmc.ontology.OntologyStore; import org.erasmusmc.ontology.ontologyutilities.OntologyCurator; import org.erasmusmc.utilities.StringUtilities; import JochemBuilder.SharedCurationScripts.CasperForJochem; import JochemBuilder.SharedCurationScripts.CurateUsingManualCurationFile; import JochemBuilder.SharedCurationScripts.RemoveDictAndCompanyNamesAtEndOfTerm; import JochemBuilder.SharedCurationScripts.RewriteFurther; import JochemBuilder.SharedCurationScripts.SaveOnlyCASandInchiEntries; public class KEGGcompoundImport { public static String date = "110809"; public static String home = "/home/khettne/Projects/Jochem"; public static String keggcImportFile = home+"/KEGG/Compound/compound"; public static String compoundToDrugMappingOutFile = home+"/KEGG/Compound/compoundToDrugMapping"; public static String keggcToInchiMappingFile = home+"/KEGG/Compound/compound.inchi"; public static String keggcDictionariesLog = home+"/KEGG/Compound/KEGGc_dictionaries_"+date+".log"; public static String keggcRewriteLog = home+"/KEGG/Compound/KEGGcCAS_casperFiltered_"+date+".log"; public static String keggcLowerCaseLog = home+"/KEGG/Compound/KEGGcCAS_lowerCase_"+date+".log"; public static String termsToRemove = "keggcTermsToRemove.txt"; public static String keggcCuratedOntologyPath = home+"/KEGG/Compound/KEGGcCAS_curated_"+date+".ontology"; public static String keggcCuratedLog = home+"/KEGG/Compound/KEGGcCAS_curated_"+date+".log"; public static void main(String[] args) { OntologyStore ontology = new OntologyStore(); OntologyFileLoader loader = new OntologyFileLoader(); //Make unprocessed thesaurus ChemicalsFromKEGGcompound keggchem = new ChemicalsFromKEGGcompound(); ontology = keggchem.run(keggcImportFile, compoundToDrugMappingOutFile); RemoveDictAndCompanyNamesAtEndOfTerm remove = new RemoveDictAndCompanyNamesAtEndOfTerm(); ontology = remove.run(ontology, keggcDictionariesLog); MapKEGGc2InChI mapOntology = new MapKEGGc2InChI(); ontology = mapOntology.map(ontology, keggcToInchiMappingFile); // CAS and InChI SaveOnlyCASandInchiEntries make = new SaveOnlyCASandInchiEntries(); ontology = make.run(ontology); //Rewrite CasperForJochem casper = new CasperForJochem(); casper.run(ontology, keggcRewriteLog); // Make some entries lower case and filter further RewriteFurther rewrite = new RewriteFurther(); ontology = rewrite.run(ontology, keggcLowerCaseLog); //Remove terms based on medline frequency CurateUsingManualCurationFile curate = new CurateUsingManualCurationFile(); ontology = curate.run(ontology, keggcCuratedLog,termsToRemove); //Set default flags and save 
ontology OntologyCurator curator = new OntologyCurator(); curator.curateAndPrepare(ontology);<|fim▁hole|> System.out.println("Done! " + StringUtilities.now()); } }<|fim▁end|>
loader.save(ontology,keggcCuratedOntologyPath);
<|file_name|>multipleblock.rs<|end_file_name|><|fim▁begin|>// /* */ #![feature(inclusive_range_syntax)] #![feature(type_ascription)] #![feature(more_struct_aliases)] extern crate modbus_server; extern crate futures; extern crate tokio_proto; extern crate tokio_service; extern crate docopt; extern crate rustc_serialize; use std::sync::{Arc,Mutex}; use std::str; use futures::{future}; use std::collections::HashMap; use docopt::Docopt; use std::io::{self}; use tokio_proto::TcpServer; use tokio_service::Service; use modbus_server::{ModbusTCPProto,ModbusTCPResponse,ModbusTCPRequest}; const USAGE: &'static str = " Usage: multiblock [options] <resource>... Options: --addr=<addr> # Base URL [default: 127.0.0.1:502]. "; #[derive(Debug, RustcDecodable)]<|fim▁hole|> // TODO: add ModbusRTUCodec use modbus_server::BlankRegisters; pub struct ModbusService { blocks:HashMap<u8,Arc<Mutex<BlankRegisters>>> } impl ModbusService { fn new ( blocks:HashMap<u8,Arc<Mutex<BlankRegisters>>>)->ModbusService { ModbusService{ blocks:blocks} } } impl Service for ModbusService { type Request = ModbusTCPRequest; type Response = ModbusTCPResponse; type Error = io::Error; type Future = future::FutureResult<Self::Response, Self::Error>; fn call(&self, req: Self::Request) -> Self::Future { let mut a = self.blocks[&req.header.uid].lock().unwrap(); future::finished(Self::Response { header:req.header, pdu: a.call(req.pdu) }) } } fn main() { let args: Args = Docopt::new(USAGE) .and_then(|d| d.decode()) .unwrap_or_else(|e| {println!("DAMN {:?}",e); e.exit()}); println!("{:?}", args); let mut blocks : HashMap<u8,Arc<Mutex<BlankRegisters>>> = HashMap::new(); for r in args.arg_resource { let block = Arc::new(Mutex::new(BlankRegisters::new())); blocks.insert(r,block); } TcpServer::new(ModbusTCPProto, args.flag_addr.parse().unwrap()) .serve(move || Ok(ModbusService::new(blocks.clone()))); }<|fim▁end|>
struct Args { arg_resource: Vec<u8>, flag_addr: String }
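The server above allocates one lock-guarded register block per Modbus unit id given on the command line and routes each request by req.header.uid. A minimal Python analogue of that routing table, with BlankRegisters reduced to a plain dict; all names here are invented for illustration.

import threading

class RegisterBlock:
    def __init__(self):
        self.lock = threading.Lock()   # plays the role of Arc<Mutex<...>>
        self.registers = {}

blocks = {uid: RegisterBlock() for uid in (1, 2, 3)}  # unit ids from argv

def write_register(uid, address, value):
    block = blocks[uid]                # unknown unit ids raise KeyError
    with block.lock:                   # serialize concurrent access per block
        block.registers[address] = value
        return block.registers[address]

assert write_register(2, 100, 7) == 7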
<|file_name|>samples.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'events.ui' # # Created by: PyQt4 UI code generator 4.11.4 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui from collections import * from functools import * import os, glob import pandas as pd try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_SamplesDialog(QtGui.QDialog): def __init__(self, parent=None, datafolder=None): """ Constructor """ QtGui.QDialog.__init__(self, parent) # self.filelist = filelist self.datafolder = datafolder # labels font self.font_labels = QtGui.QFont("Arial", 12, QtGui.QFont.Bold) self.font_edits = QtGui.QFont("Arial", 12) self.font_buttons = QtGui.QFont("Arial", 10, QtGui.QFont.Bold) self.setupUi(self) self.exec_() def setupUi(self, Dialog): Dialog.setObjectName(_fromUtf8("Dialog")) Dialog.resize(1000, 400) self.gridLayout = QtGui.QGridLayout(Dialog) self.gridLayout.setObjectName(_fromUtf8("gridLayout")) # list of Events self.prepare_form(Dialog) self.retranslateUi(Dialog) QtCore.QMetaObject.connectSlotsByName(Dialog) def load_data(self): print(self.datafolder) self.samplefile = glob.glob(os.path.join(self.datafolder, "*_SAMPLES.csv"))[0] if os.path.isfile(self.samplefile): self.samplesdf = pd.read_csv(self.samplefile, encoding='ISO-8859-1') else: print("File not found: ", self.samplefile) self.samplesdf = None self.combodefaults = {'cuvette': ['600', '2000', '4000']} def prepare_form(self, Dialog): # load or reload data self.load_data() # form dicts edit_list = ['date', 'time', 'samplename', 'filename', 'smoothing', 'cal32', 'cal44', 'cons32', 'cons44', 'zero44', 'zero45', 'zero46', 'zero47', 'zero49'] combo_list = ['user', 'membrane', 'cuvette'] self.labels = defaultdict(defaultdict) self.edits = defaultdict(defaultdict) self.radios = defaultdict(defaultdict) self.combobox = defaultdict(defaultdict) self.labs = defaultdict(defaultdict) self.labs = {"time": "Time", "date": "Date", "samplename": "Sample Name", "filename": "File Name", "smoothing": "Smoothing", "cuvette": "Cuvette", "user": "User", "membrane": "Membrane", "cal44": "Calibration 44", "cal32": "Calibration 32", "cons32": "Consumption 32", "cons44": "Consumption 44", "zero32": "Zero 32", "zero44": "Zero 44", "zero45": "Zero 45", "zero46": "Zero 46", "zero47": "Zero 47", "zero49": "Zero 49"} self.buttons = OrderedDict(sorted({'Apply': defaultdict(object), 'Delete': defaultdict(object)}.items())) xpos, ypos = 1, 0 for row in self.samplesdf.iterrows(): row_index = row[0] r = row[1] self.radios[row_index] = QtGui.QRadioButton(Dialog) self.radios[row_index].setObjectName(_fromUtf8("_".join(["radio", str(row_index)]))) self.gridLayout.addWidget(self.radios[row_index], ypos+1, 0, 1, 1) for k in ['samplename', 'date', 'time', 'cuvette']: # create labels if ypos == 0: self.labels[k] = QtGui.QLabel(Dialog) self.labels[k].setObjectName(_fromUtf8("_".join(["label", k]))) self.labels[k].setText(str(self.labs[k])) self.labels[k].setAlignment(QtCore.Qt.AlignCenter | QtCore.Qt.AlignVCenter) self.labels[k].setFont(self.font_labels) self.gridLayout.addWidget(self.labels[k], 0, xpos, 1, 1) if k in edit_list: 
self.edits[k][row_index] = QtGui.QLineEdit(Dialog) self.edits[k][row_index].setObjectName(_fromUtf8("_".join(["edit", k, str(row_index)]))) self.edits[k][row_index].setText(str(r[k])) self.edits[k][row_index].setFont(self.font_edits) if k in ['time', 'date']: self.edits[k][row_index].setFixedWidth(80) self.gridLayout.addWidget(self.edits[k][row_index], ypos+1, xpos, 1, 1) elif k in combo_list: self.combobox[k][row_index] = QtGui.QComboBox(Dialog) self.combobox[k][row_index].setObjectName(_fromUtf8("_".join(["combo", k, str(row_index)]))) self.combobox[k][row_index].addItems(self.combodefaults[k]) self.combobox[k][row_index].setCurrentIndex(self.combobox[k][row_index].findText(str(r[k]), QtCore.Qt.MatchFixedString)) self.combobox[k][row_index].setFont(self.font_edits) self.gridLayout.addWidget(self.combobox[k][row_index], ypos+1, xpos, 1, 1) <|fim▁hole|> xpos += 1 # create buttons for k in self.buttons.keys(): # if ypos > 0: self.buttons[k][row_index] = QtGui.QPushButton(Dialog) self.buttons[k][row_index].setObjectName(_fromUtf8("_".join(["event", k, "button", str(row_index)]))) self.buttons[k][row_index].setText(_translate("Dialog", k + str(row_index), None)) self.buttons[k][row_index].setFont(self.font_buttons) if k == 'Apply': self.buttons[k][row_index].clicked.connect(partial(self.ask_apply_changes, [row_index, Dialog])) self.buttons[k][row_index].setStyleSheet("background-color: #ffeedd") elif k == 'Delete': self.buttons[k][row_index].clicked.connect(partial(self.ask_delete_confirm1, [row_index, Dialog])) self.buttons[k][row_index].setStyleSheet("background-color: #ffcddd") self.gridLayout.addWidget(self.buttons[k][row_index], ypos+1, xpos, 1, 1) xpos += 1 # increments ypos += 1 xpos = 1 Dialog.resize(1000, 70 + (30 * ypos)) # self.add_row(Dialog) def ask_delete_confirm1(self, args): sid = args[0] Dialog = args[1] # check if radio button is checked. if self.radios[sid].isChecked(): msg = "Are you sure you want to delete the following sample : \n\n" details = "" for c in self.samplesdf.columns: details += str(c) + ": " + str(self.samplesdf.at[sid, c]) + "\n" reply = QtGui.QMessageBox.warning(self, 'Confirmation #1', msg + details, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) if reply == QtGui.QMessageBox.Yes: msg2 = "Are you sure REALLY REALLY sure you want to delete the following sample ? \n\n" + \ "This is the last confirmation message. After confirming, the files will be PERMANENTLY deleted and the data WILL be lost ! 
\n\n" msgbox = QtGui.QMessageBox.critical(self, 'Confirmation #2', msg2 + details, QtGui.QMessageBox.Yes | QtGui.QMessageBox.No, QtGui.QMessageBox.No) reply2 = msgbox if reply2 == QtGui.QMessageBox.Yes: # deletion confirmed self.delete_confirmed(sid) self.update_form( Dialog) else: QtGui.QMessageBox.question(self, 'Error', 'Please select the sample you want to delete on the left', QtGui.QMessageBox.Ok) def delete_confirmed(self, sid): # sample file filename = self.samplesdf.loc[sid, 'filename'] # delete row in samplesdf self.samplesdf = self.samplesdf.drop(self.samplesdf.index[sid]) self.samplesdf.to_csv(self.samplefile, index=False, encoding='ISO-8859-1') # delete file in rawdata if os.path.isfile(os.path.join(self.datafolder, "rawdata", filename)): os.remove(os.path.join(self.datafolder, "rawdata", filename)) # print(" delete: ", os.path.join(self.datafolder, "rawdata", filename)) # delete file in data if os.path.isfile(os.path.join(self.datafolder, filename)): os.remove(os.path.join(self.datafolder, filename)) # print(" delete: ", os.path.join(self.datafolder, filename)) def ask_apply_changes(self, args): sid = args[0] Dialog = args[1] newdata=defaultdict(str) for k in self.edits.keys(): newdata[k] = self.edits[k][sid].text() for k in self.combobox.keys(): newdata[k] = self.combobox[k][sid].currentText() details = "" for k in newdata: details += str(self.samplesdf.at[sid, k]) + '\t --> \t' + str(newdata[k]) + "\n" msg = "Are you sure you want to apply the changes to sample " + str(self.samplesdf.at[sid, 'samplename']) + " ?\n\n" reply = QtGui.QMessageBox.question(self, 'Modify a sample', msg + details, QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) if reply == QtGui.QMessageBox.Yes: self.apply_changes_confirmed(sid, newdata) self.update_form(Dialog) else: print('cancel modification') def apply_changes_confirmed(self, sid, newdata): # rename files newdata['filename'] = str(newdata['date']) + "_" + str(newdata['samplename']) + ".csv" os.rename(os.path.join(self.datafolder, str(self.samplesdf.at[sid, 'filename'])), os.path.join(self.datafolder, str(newdata['filename']))) os.rename(os.path.join(self.datafolder, "rawdata", str(self.samplesdf.at[sid, 'filename'])), os.path.join(self.datafolder, "rawdata", str(newdata['filename']))) for k in newdata.keys(): self.samplesdf.at[sid, k] = newdata[k] self.samplesdf.to_csv(self.samplefile, index=False, encoding='ISO-8859-1') def update_form(self, Dialog): # empty variables self.edits = None self.combobox = None self.buttons = None self.radios = None self.labs = None self.labels = None # empty layout for i in reversed(range(self.gridLayout.count())): self.gridLayout.itemAt(i).widget().setParent(None) self.prepare_form(Dialog) def retranslateUi(self, Dialog): Dialog.setWindowTitle(_translate("Dialog", "Samples Manager", None)) # self.label.setText(_translate("Dialog", "File", None))<|fim▁end|>
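This row's completion is empty: nothing belongs in the hole between placing the combobox widget and advancing the column counter, so the prompt's prefix and suffix meet directly:

self.gridLayout.addWidget(self.combobox[k][row_index], ypos+1, xpos, 1, 1)
xpos += 1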
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2013, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- """ ## @file Internal package. Package containing modules that are used internally by Numenta Python tools and plugins to extend standard library functionality. These modules should NOT be used by client applications. The following modules are included: nupic.support.paths Module containing filesystem path manipulation utilities. nupic.support.serialization Module containing Python object serialization (pickling and unpickling) and versioning utilities. nupic.support.compress Module containing Python object encoding and compression utilities. nupic.support.processes Module containing operating system process management utilities and wrappers. nupic.support.output Module containing operating system interprocess communication utilities and wrappers. nupic.support.diff Module containing file difference calculation wrappers. nupic.support.vision Temporary location for vision framework before the move to nupic.vision. nupic.support.deprecate Contains the deprecate decorator used for automatic handling of deprecated methods. nupic.support.memchecker Contains the MemChecker class, for checking physical memory and monitoring memory usage. nupic.support.imagesearch Contains functions for searching for images on the web and downloading them. """ from __future__ import with_statement # Standard imports import os import sys import inspect import logging import logging.config import logging.handlers from platform import python_version import struct from StringIO import StringIO import time import traceback from configuration import Configuration from nupic.support.fshelpers import makeDirectoryFromAbsolutePath # Local imports ############################################################################# def getCallerInfo(depth=2): """Utility function to get information about function callers The information is the tuple (function/method name, filename, class) The class will be None if the caller is just a function and not an object method. 
depth: how far back in the callstack to go to extract the caller info """ f = sys._getframe(depth) method_name = f.f_code.co_name filename = f.f_code.co_filename arg_class = None args = inspect.getargvalues(f) if len(args[0]) > 0: arg_name = args[0][0] # potentially the 'self' arg if its a method arg_class = args[3][arg_name].__class__.__name__ return (method_name, filename, arg_class) ############################################################################# def title(s=None, additional='', stream=sys.stdout, frame='-'): """Utility function to display nice titles It automatically extracts the name of the function/method it is called from and you can add additional text. title() will then print the name of the function/method and the additional text surrounded by tow lines of dashes. If you don't want the name of the function, you can provide alternative text (regardless of the additional text) @param s - text to display, uses the function name and arguments by default @param additional - extra text to display (not needed if s is not None) @param stream - the stream to print to. Ny default goes to standard output @param frame - the character used for the over and under line. Default is '-' Examples: def foo(): title() will display: --- foo --- def foo(): title(additional='(), this is cool!!!') will display: ---------------------- foo(), this is cool!!! ---------------------- def foo(): title('No function name here!') will display: ---------------------- No function name here! ---------------------- """ if s is None: callable_name, file_name, class_name = getCallerInfo(2) s = callable_name if class_name is not None: method_name = s s = class_name + '.' + callable_name lines = (s + additional).split('\n') length = max(len(line) for line in lines) print >> stream, '-' * length print >> stream, s + additional print >> stream, '-' * length ############################################################################# def bringToFront(title): """Bring a top-level window with a given title to the front on Windows""" if sys.platform != 'win32': return import ctypes find_window = ctypes.windll.user32.FindWindowA set_foreground_window = ctypes.windll.user32.SetForegroundWindow hwnd = find_window(None, title) if hwnd == 0: raise Exception('There is no window titled: "%s"' % title) set_foreground_window(hwnd) ############################################################################# def getUserDocumentsPath(): """ Find the user's "Documents" directory (OS X), "My Documents" directory (Windows), or home directory (Unix). 
""" # OS X and Windows code from: # http://www.blueskyonmars.com/2005/08/05 # /finding-a-users-my-documents-folder-on-windows/ # Alternate Windows code from: # http://bugs.python.org/issue1763 if sys.platform.startswith('win'): if sys.platform.startswith('win32'): # Try the primary method on 32-bit windows try: from win32com.shell import shell alt = False except ImportError: try: import ctypes dll = ctypes.windll.shell32 alt = True except: raise Exception("Could not find 'My Documents'") else: # Use the alternate method on 64-bit Windows alt = True if not alt: # Primary method using win32com df = shell.SHGetDesktopFolder() pidl = df.ParseDisplayName(0, None, "::{450d8fba-ad25-11d0-98a8-0800361b1103}")[1] path = shell.SHGetPathFromIDList(pidl) else: # Alternate method using ctypes rather than win32com buf = ctypes.create_string_buffer(300) dll.SHGetSpecialFolderPathA(None, buf, 0x0005, False) path = buf.value elif sys.platform.startswith('darwin'): from Carbon import Folder, Folders folderref = Folder.FSFindFolder(Folders.kUserDomain, Folders.kDocumentsFolderType, False) path = folderref.as_pathname() else: path = os.getenv('HOME') return path ############################################################################# def getArgumentDescriptions(f): """ Get the arguments, default values, and argument descriptions for a function. Returns a list of tuples: (argName, argDescription, defaultValue). If an argument has no default value, the tuple is only two elements long (as None cannot be used, since it could be a default value itself). Parses the argument descriptions out of the function docstring, using a format something lke this: [junk] argument_name: description... description... description... [junk] [more arguments] It will find an argument as long as the exact argument name starts the line. It will then strip a trailing colon, if present, then strip the rest of the line and use it to start the description. It will then strip and append any subsequent lines with a greater indent level than the original argument name. """ # Get the argument names and default values argspec = inspect.getargspec(f) # Scan through the docstring to extract documentation for each argument as # follows: # Check the first word of the line, stripping a colon if one is present. 
# If it matches an argument name: # Take the rest of the line, stripping leading whitespeace # Take each subsequent line if its indentation level is greater than the # initial indentation level # Once the indentation level is back to the original level, look for # another argument docstring = f.__doc__ descriptions = {} if docstring: lines = docstring.split('\n') i = 0 while i < len(lines): stripped = lines[i].lstrip() if not stripped: i += 1 continue # Indentation level is index of the first character indentLevel = lines[i].index(stripped[0]) # Get the first word and remove the colon, if present firstWord = stripped.split()[0] if firstWord.endswith(':'): firstWord = firstWord[:-1] if firstWord in argspec.args: # Found an argument argName = firstWord restOfLine = stripped[len(firstWord)+1:].strip() argLines = [restOfLine] # Take the next lines as long as they are indented more i += 1 while i < len(lines): stripped = lines[i].lstrip() if not stripped: # Empty line - stop break if lines[i].index(stripped[0]) <= indentLevel: # No longer indented far enough - stop break # This line counts too argLines.append(lines[i].strip()) i += 1 # Store this description descriptions[argName] = ' '.join(argLines) else: # Not an argument i += 1 # Build the list of (argName, description, defaultValue) args = [] if argspec.defaults: defaultCount = len(argspec.defaults) else: defaultCount = 0 nonDefaultArgCount = len(argspec.args) - defaultCount for i, argName in enumerate(argspec.args): if i >= nonDefaultArgCount: defaultValue = argspec.defaults[i - nonDefaultArgCount] args.append((argName, descriptions.get(argName, ""), defaultValue)) else: args.append((argName, descriptions.get(argName, ""))) return args ############################################################################# # TODO queryNumInwardIters appears to be unused and should probably be deleted # from here altogether; it's likely an artifact of the legacy vision support. #def queryNumInwardIters(configPath, radialLength, numRepetitions=1): # """ # Public utility API that accepts a config path and # radial length, and determines the proper number of # training iterations with which to invoke net.run() # when running a PictureSensor in 'inward' mode. # """ # numCats = queryNumCategories(configPath) # sequenceLen = radialLength + 1 # numItersPerCat = (8 * radialLength) * sequenceLen # numTrainingItersTP = numItersPerCat * numCats # return numTrainingItersTP * numRepetitions ############################################################################# gLoggingInitialized = False def initLogging(verbose=False, console='stdout', consoleLevel='DEBUG'): """ Initilize NuPic logging by reading in from the logging configuration file. The logging configuration file is named 'nupic-logging.conf' and is expected to be in the format defined by the python logging module. If the environment variable 'NTA_CONF_PATH' is defined, then the logging configuration file is expected to be in the NTA_CONF_PATH directory. If NTA_CONF_PATH is not defined, then it is found in the 'conf/default' subdirectory of the NuPic installation directory (typically ~/nta/current/conf/default) The logging configuration file can use the environment variable 'NTA_LOG_DIR' to set the locations of log files. If this variable is not defined already in the environment, this method will set it to the 'logs' subdirectory of the NuPic install directory (typically ~/nta/eng/logs) before loading in the configuration file. 
console: Defines console output for the default "root" logging configuration; this may be one of 'stdout', 'stderr', or None; Use None to suppress console logging output consoleLevel: Logging-level filter string for console output corresponding to logging levels in the logging module; may be one of: 'DEBUG', 'INFO', 'WARNING', 'ERROR', or 'CRITICAL'. E.g., a value of'WARNING' suppresses DEBUG and INFO level output to console, but allows WARNING, ERROR, and CRITICAL """ # NOTE: If you call this twice from the same process there seems to be a # bug - logged messages don't show up for loggers that you do another # logging.getLogger() on. global gLoggingInitialized if gLoggingInitialized: if verbose: print >> sys.stderr, "Logging already initialized, doing nothing." return consoleStreamMappings = { 'stdout' : 'stdoutConsoleHandler', 'stderr' : 'stderrConsoleHandler', } consoleLogLevels = ['DEBUG', 'INFO', 'WARNING', 'WARN', 'ERROR', 'CRITICAL', 'FATAL'] assert console is None or console in consoleStreamMappings.keys(), ( 'Unexpected console arg value: %r') % (console,) assert consoleLevel in consoleLogLevels, ( 'Unexpected consoleLevel arg value: %r') % (consoleLevel) # ----------------------------------------------------------------------- # Setup logging. Look for the nupic-logging.conf file, first in the # NTA_CONFIG_DIR path (if defined), then in a subdirectory of the nupic # module # TODO: move into nupic.support configFilename = 'nupic-logging.conf' try: configFilePath = Configuration.findConfigFile(configFilename) except: configFilePath = None # If NTA_LOG_DIR is not defined, set it now. This is used by the logging # config file to set the path for the log files if 'NTA_LOG_DIR' not in os.environ: os.environ['NTA_LOG_DIR'] = os.path.join(os.environ['NUPIC'], 'logs') if not os.path.exists(os.environ['NTA_LOG_DIR']): makeDirectoryFromAbsolutePath(os.path.abspath(os.environ['NTA_LOG_DIR'])) # Load in the logging configuration file if configFilePath is None: print >> sys.stderr, ( "WARNING: Could not find the logging configuration file " \ "(filename: '%s', expected to be in search path: %s). Logging is " \ " disabled.") % (configFilename, Configuration.getConfigPaths()) else: if verbose: print >> sys.stderr, ( "Using logging configuration file: %s") % (configFilePath) # This dict will hold our replacement strings for logging configuration replacements = dict() def makeKey(name): """ Makes replacement key """ return "$$%s$$" % (name) platform = sys.platform.lower() if platform.startswith('java'): # Jython import java.lang platform = java.lang.System.getProperty("os.name").lower() if platform.startswith('mac os x'): platform = 'darwin' if platform.startswith('darwin'): replacements[makeKey('SYSLOG_HANDLER_ADDRESS')] = '"/var/run/syslog"' elif platform.startswith('linux'): replacements[makeKey('SYSLOG_HANDLER_ADDRESS')] = '"/dev/log"' else: raise RuntimeError("This platform is neither darwin nor linux: %s" % ( sys.platform,)) if False: #os.path.isdir('/var/log/numenta/nupic'): # NOTE: Not using syslogHandler for now because it either truncates or # drops messages over ~1,400 bytes (depending on platform) # Nupic logs go to syslog. 
Also, SysLogHandler raises an exception # on jython (at least on 2.5.2): "AttributeError: 'module' object has no # attribute 'AF_UNIX'" (jython is used by a sub-moduleof # ClientJobManager) replacements[makeKey('PERSISTENT_LOG_HANDLER')] = 'syslogHandler' else: # Nupic logs go to file replacements[makeKey('PERSISTENT_LOG_HANDLER')] = 'fileHandler' # Set up log file path for the default file handler logFilePath = _genLoggingFilePath() makeDirectoryFromAbsolutePath(os.path.dirname(logFilePath)) replacements[makeKey('FILE_HANDLER_LOG_FILENAME')] = repr(logFilePath) # Set up root logger replacements[makeKey('ROOT_LOGGER_HANDLERS')] = ( replacements[makeKey('PERSISTENT_LOG_HANDLER')]) if console is not None: replacements[makeKey('ROOT_LOGGER_HANDLERS')] += ( ',' + consoleStreamMappings[console]) # Set up log level for console handlers replacements[makeKey('CONSOLE_LOG_LEVEL')] = consoleLevel customConfig = StringIO() with open(configFilePath) as src: for lineNum, line in enumerate(src): if "$$" in line: for (key, value) in replacements.items(): line = line.replace(key, value) # If there is still a replacement string in the line, we're missing it # from our replacements dict if "$$" in line and "$$<key>$$" not in line: raise RuntimeError(("The text %r, found at line #%d of file %r, " "contains a string not found in our replacement " "dict.") % (line, lineNum, configFilePath)) customConfig.write(line) customConfig.seek(0) if python_version()[:3] >= '2.6': # NOTE: the disable_existing_loggers arg is new as of Python 2.6, so it's # not supported on our jython interperter, which was v2.5.x as of this # writing logging.config.fileConfig(customConfig, disable_existing_loggers=False) else: logging.config.fileConfig(customConfig) gLoggingInitialized = True ############################################################################# def reinitLoggingDir(): """ (Re-)Initialize the loging directory for the calling application that uses initLogging() for logging configuration NOTE: It's typially unnecessary to call this function directly since initLogging takes care of it for you. This function is exposed primarily for the benefit of nupic-services.py to allow it to restore its logging directory after the hard-reset operation. """ if gLoggingInitialized: makeDirectoryFromAbsolutePath(os.path.dirname(_genLoggingFilePath())) ############################################################################# def _genLoggingFilePath(): """ Generate a filepath for the calling app """<|fim▁hole|> os.environ['NTA_LOG_DIR'], 'numenta-logs-%s' % (os.environ['USER'],), appName)) appLogFileName = '%s-%s-%s.log' % ( appName, long(time.mktime(time.gmtime())), os.getpid()) return os.path.join(appLogDir, appLogFileName) ############################################################################# def enableLoggingErrorDebugging(): """ Overrides the python logging facility's Handler.handleError function to raise an exception instead of print and suppressing it. This allows a deeper stacktrace to be emitted that is very helpful for quickly finding the file/line that initiated the invalidly-formatted logging operation. NOTE: This is for debugging only - be sure to remove the call to this function *before* checking in your changes to the source code repository, as it will cause the application to fail if some invalidly-formatted logging statement still exists in your code. 
Example usage: enableLoggingErrorDebugging must be called *after* initLogging() import nupic.support nupic.support.initLogging() nupic.support.enableLoggingErrorDebugging() "TypeError: not all arguments converted during string formatting" is an example exception that might be output by the built-in handlers with the following very shallow traceback that doesn't go deep enough to show the source of the problem: File ".../python2.6/logging/__init__.py", line 776, in emit msg = self.format(record) File ".../python2.6/logging/__init__.py", line 654, in format return fmt.format(record) File ".../python2.6/logging/__init__.py", line 436, in format record.message = record.getMessage() File ".../python2.6/logging/__init__.py", line 306, in getMessage msg = msg % self.args TypeError: not all arguments converted during string formatting """ print >> sys.stderr, ("WARNING") print >> sys.stderr, ("WARNING: " "nupic.support.enableLoggingErrorDebugging() was " "called to install a debugging patch into all logging handlers that " "will cause the program to fail if a logging exception occurrs; this " "call is for debugging only and MUST be removed before checking in code " "into production system. Caller: %s") % ( traceback.format_stack(),) print >> sys.stderr, ("WARNING") def handleErrorPatch(*args, **kwargs): if logging.raiseExceptions: raise for handler in logging._handlerList: handler.handleError = handleErrorPatch return ############################################################################# def clippedObj(obj, maxElementSize=64): """ Return a clipped version of obj suitable for printing, This is useful when generating log messages by printing data structures, but don't want the message to be too long. If passed in a dict, list, or namedtuple, each element of the structure's string representation will be limited to 'maxElementSize' characters. This will return a new object where the string representation of each element has been truncated to fit within maxElementSize. """ # Is it a named tuple? if hasattr(obj, '_asdict'): obj = obj._asdict() # Printing a dict? if isinstance(obj, dict): objOut = dict() for key,val in obj.iteritems(): objOut[key] = clippedObj(val) # Printing a list? elif hasattr(obj, '__iter__'): objOut = [] for val in obj: objOut.append(clippedObj(val)) # Some other object else: objOut = str(obj) if len(objOut) > maxElementSize: objOut = objOut[0:maxElementSize] + '...' 
return objOut ############################################################################### def intTo8ByteArray(inValue): """ Converts an int to a packed byte array, with left most significant byte """ values = ( (inValue >> 56 ) & 0xff, (inValue >> 48 ) & 0xff, (inValue >> 40 ) & 0xff, (inValue >> 32 ) & 0xff, (inValue >> 24 ) & 0xff, (inValue >> 16 ) & 0xff, (inValue >> 8 ) & 0xff, inValue & 0xff ) s = struct.Struct('B B B B B B B B') packed_data = s.pack(*values) return packed_data ############################################################################### def byteArrayToInt(packed_data): """ Converts a byte array into an integer """ value = struct.unpack('B B B B B B B B', packed_data) return value[0] << 56 | \ value[1] << 48 | \ value[2] << 40 | \ value[3] << 32 | \ value[4] << 24 | \ value[5] << 16 | \ value[6] << 8 | \ value[7] ############################################################################### def getSpecialRowID(): """ Special row id is 0xFF FFFF FFFF FFFF FFFF (9 bytes of 0xFF) """ values = (0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF) s = struct.Struct('B B B B B B B B B') packed_data = s.pack(*values) return packed_data ################################################################################ _FLOAT_SECONDS_IN_A_DAY = 24.0 * 60.0 * 60.0 def floatSecondsFromTimedelta(td): """ Convert datetime.timedelta to seconds in floating point """ sec = (td.days * _FLOAT_SECONDS_IN_A_DAY + td.seconds * 1.0 + td.microseconds / 1E6) return sec ############################################################################# def aggregationToMonthsSeconds(interval): """ Return the number of months and seconds from an aggregation dict that represents a date and time. Interval is a dict that contain one or more of the following keys: 'years', 'months', 'weeks', 'days', 'hours', 'minutes', seconds', 'milliseconds', 'microseconds'. Parameters: --------------------------------------------------------------------- interval: The aggregation interval, as a dict representing a date and time retval: number of months and seconds in the interval, as a dict: {months': XX, 'seconds': XX}. The seconds is a floating point that can represent resolutions down to a microsecond. For example: aggregationMicroseconds({'years': 1, 'hours': 4, 'microseconds':42}) == {'months':12, 'seconds':14400.000042} """ seconds = interval.get('microseconds', 0) * 0.000001 seconds += interval.get('milliseconds', 0) * 0.001 seconds += interval.get('seconds', 0) seconds += interval.get('minutes', 0) * 60 seconds += interval.get('hours', 0) * 60 * 60 seconds += interval.get('days', 0) * 24 * 60 * 60 seconds += interval.get('weeks', 0) * 7 * 24 * 60 * 60 months = interval.get('months', 0) months += 12 * interval.get('years', 0) return {'months': months, 'seconds': seconds} ############################################################################# def aggregationDivide(dividend, divisor): """ Return the result from dividing two dicts that represent date and time. Both dividend and divisor are dicts that contain one or more of the following keys: 'years', 'months', 'weeks', 'days', 'hours', 'minutes', seconds', 'milliseconds', 'microseconds'. Parameters: --------------------------------------------------------------------- dividend: The numerator, as a dict representing a date and time divisor: the denominator, as a dict representing a date and time retval: number of times divisor goes into dividend, as a floating point number. 
For example: aggregationDivide({'hours': 4}, {'minutes': 15}) == 16 """ # Convert each into microseconds dividendMonthSec = aggregationToMonthsSeconds(dividend) divisorMonthSec = aggregationToMonthsSeconds(divisor) # It is a usage error to mix both months and seconds in the same operation if (dividendMonthSec['months'] != 0 and divisorMonthSec['seconds'] != 0) \ or (dividendMonthSec['seconds'] != 0 and divisorMonthSec['months'] != 0): raise RuntimeError("Aggregation dicts with months/years can only be " "inter-operated with other aggregation dicts that contain " "months/years") if dividendMonthSec['months'] > 0: return float(dividendMonthSec['months']) / divisor['months'] else: return float(dividendMonthSec['seconds']) / divisorMonthSec['seconds']<|fim▁end|>
appName = os.path.splitext(os.path.basename(sys.argv[0]))[0] or 'UnknownApp'
appLogDir = os.path.abspath(os.path.join(
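The completion is cut off mid-expression in this dump (it ends at the open os.path.join call). Joined with the suffix that follows <|fim▁hole|> in the prompt, _genLoggingFilePath reads as below; note the Python 2 long() call:

def _genLoggingFilePath():
    """ Generate a filepath for the calling app """
    appName = os.path.splitext(os.path.basename(sys.argv[0]))[0] or 'UnknownApp'
    appLogDir = os.path.abspath(os.path.join(
        os.environ['NTA_LOG_DIR'],
        'numenta-logs-%s' % (os.environ['USER'],),
        appName))
    appLogFileName = '%s-%s-%s.log' % (
        appName, long(time.mktime(time.gmtime())), os.getpid())
    return os.path.join(appLogDir, appLogFileName)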
<|file_name|>lecture_03.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Лекция http://uneex.ru/LecturesCMC/PythonIntro2014/03_DataTypes <|fim▁hole|> # print decimal.Decimal(1.1) + decimal.Decimal(1.1) # print decimal.Decimal("1.1") + decimal.Decimal("1.1") # print dir(random) a = [] for j in range(0, 10): a.append(random.randrange(100)) print a<|fim▁end|>
import decimal
import random
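With the two restored imports in place, the whole lesson script is a small runnable piece of Python 2 (the Russian header comment reads "Lecture", followed by the course URL):

# coding=utf-8
# Lecture: http://uneex.ru/LecturesCMC/PythonIntro2014/03_DataTypes
import decimal
import random

# print decimal.Decimal(1.1) + decimal.Decimal(1.1)
# print decimal.Decimal("1.1") + decimal.Decimal("1.1")
# print dir(random)

a = []
for j in range(0, 10):
    a.append(random.randrange(100))
print a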
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#![allow(unused)] use poisson::{algorithm, Builder, Float, Type, Vector}; use rand::distributions::{Distribution, Standard}; use rand::{rngs::SmallRng, SeedableRng}; use num_traits::NumCast; use alga::general::AbstractField; use alga::linear::{FiniteDimVectorSpace, NormedSpace}; use std::fmt::Debug; pub fn print_v<F: Float, V: Vector<F>>(v: V) -> String { let mut result = "(".to_owned(); for i in 0..V::dimension() { result.push_str(&format!("{}, ", v[i].to_f64().unwrap())); } if V::dimension() != 0 { result.pop(); } result.push(')'); result } #[derive(Clone, Copy)] pub enum When { Always, Sometimes, Never, } pub fn test_with_samples<T>(samples: usize, relative_radius: f64, seeds: u32, ptype: Type) where T: Debug + Vector<f64> + Copy, Standard: Distribution<T>, { test_with_samples_prefilled( samples, relative_radius, seeds, ptype, |_| |_| None::<T>, When::Always, ); } pub fn test_with_samples_prefilled<'r, T, F, I>( samples: usize, relative_radius: f64, seeds: u32,<|fim▁hole|> ptype: Type, mut prefiller: F, valid: When, ) where T: 'r + Debug + Vector<f64> + Copy, F: FnMut(f64) -> I, I: FnMut(Option<T>) -> Option<T>, Standard: Distribution<f64>, Standard: Distribution<T>, { test_algo( samples, relative_radius, seeds, ptype, &mut prefiller, valid, algorithm::Ebeida, ); test_algo( samples, relative_radius, seeds, ptype, &mut prefiller, valid, algorithm::Bridson, ); } fn test_algo<'r, T, F, I, A>( samples: usize, relative_radius: f64, seeds: u32, ptype: Type, prefiller: &mut F, valid: When, algo: A, ) where T: 'r + Debug + Vector<f64> + Copy, F: FnMut(f64) -> I, I: FnMut(Option<T>) -> Option<T>, A: algorithm::Creator<f64, T>, Standard: Distribution<f64>, Standard: Distribution<T>, { use self::When::*; for i in 0..seeds { let mut prefilled = vec![]; let rand = SmallRng::from_seed([ (i * 3 + 2741) as u8, (i * 7 + 2729) as u8, (i * 13 + 2713) as u8, (i * 19 + 2707) as u8, (i * 29 + 2693) as u8, (i * 37 + 2687) as u8, (i * 43 + 2677) as u8, (i * 53 + 2663) as u8, (i * 61 + 2657) as u8, (i * 71 + 2633) as u8, (i * 79 + 2609) as u8, (i * 89 + 2591) as u8, (i * 101 + 2557) as u8, (i * 107 + 2549) as u8, (i * 113 + 2539) as u8, (i * 131 + 2521) as u8, ]); let mut poisson_iter = Builder::with_samples(samples, relative_radius, ptype) .build(rand, algo) .into_iter(); let mut poisson = vec![]; let mut prefill = (prefiller)(poisson_iter.radius()); let mut last = None; let mut does_prefill = false; loop { while let Some(p) = (prefill)(last) { does_prefill = true; match valid { Always => assert!( poisson_iter.stays_legal(p), "All prefilled should be accepted by the '{:?}' algorithm. \ {} was rejected.", algo, print_v(p) ), Never => assert!( !poisson_iter.stays_legal(p), "All prefilled should be rejected by the '{:?}' algorithm. 
\ {} was allowed even though {:?} was last to be generated.", algo, print_v(p), last.map(print_v) ), _ => {} } prefilled.push(p); poisson_iter.restrict(p); } if let Some(pp) = poisson_iter.next() { last = Some(pp); poisson.push(pp); } else { break; } } let radius = poisson_iter.radius(); let poisson_type = poisson_iter.poisson_type(); let poisson = poisson.into_iter().chain( if let Always = valid { prefilled } else { vec![] } .into_iter(), ); test_poisson(poisson, radius, poisson_type, algo, does_prefill); } } pub fn test_poisson<F, I, T, A>(poisson: I, radius: F, poisson_type: Type, algo: A, does_prefill: bool) where I: Iterator<Item = T>, F: Float, T: Debug + Vector<F> + Copy, A: algorithm::Creator<F, T>, { use poisson::Type::*; let dim = T::dimension(); let mut vecs = vec![]; let mut hints = vec![]; { let mut iter = poisson.into_iter(); while let Some(v) = iter.next() { if let (low, Some(high)) = iter.size_hint() { hints.push((low, high)); } else { panic!( "There wasn't hint for {}th iteration for the '{:?}' algorithm.", hints.len(), algo ); } vecs.push(v); } } let len = hints.len(); for (n, (l, h)) in hints.into_iter().enumerate() { let remaining = len - (n + 1); assert!(l <= remaining, "For the '{:?}' algorithm the lower bound of hint should be smaller than or equal to actual: {} <= {}", algo, l, remaining); assert!(h >= remaining, "For the '{:?}' algorithm the upper bound of hint should be larger than or equal to actual: {} >= {}", algo, h, remaining); } if !does_prefill { for v in &vecs { for n in 0..T::dimension() { assert!(v[n] >= F::cast(0)); assert!(v[n] < F::cast(1)); } } } let vecs = match poisson_type { Perioditic => { let mut vecs2 = vec![]; for n in 0..3i64.pow(dim as u32) { let mut t = T::zero(); let mut div = n; for i in 0..T::dimension() { let rem = div % 3; div /= 3; t[i] = NumCast::from(rem - 1).unwrap(); } for v in &vecs { vecs2.push(*v + t); } } vecs2 } Normal => vecs, }; //TODO: Figure out how to check if distribution is maximal. assert_legal_poisson(&vecs, radius, algo); } pub fn assert_legal_poisson<F, T, A>(vecs: &Vec<T>, radius: F, algo: A) where F: Float, T: Debug + Vector<F> + Copy, A: algorithm::Creator<F, T>, { for &v1 in vecs { for &v2 in vecs { if v1 == v2 { continue; } let dist = (v1 - v2).norm(); assert!(dist > radius * F::cast(2), "Poisson-disk distribution requirement not met while generating using the '{:?}' algorithm: There exists 2 vectors with \ distance to each other of {} which is smaller than smallest allowed one {}. \ The samples: [{:?}, {:?}]", algo, dist.to_f64().unwrap(), radius.to_f64().unwrap() * 2., v1, v2); } } }<|fim▁end|>
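Another empty completion: the hole falls inside the parameter list of test_with_samples_prefilled, between seeds and ptype, and the prompt's prefix and suffix already spell out the full signature (where-clause elided here):

pub fn test_with_samples_prefilled<'r, T, F, I>(
    samples: usize,
    relative_radius: f64,
    seeds: u32,
    ptype: Type,
    mut prefiller: F,
    valid: When,
) // where-bounds on T, F and I as given in the prompt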
<|file_name|>TrajectoryDrawing.hpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2004 * Swiss Federal Institute of Technology, Lausanne. All rights reserved. * * Developed at the Autonomous Systems Lab. * Visit our homepage at http://asl.epfl.ch/ * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA */ #ifndef NPM_TRAJECTORYDRAWING_HPP #define NPM_TRAJECTORYDRAWING_HPP #include <npm/gfx/Drawing.hpp> namespace npm { class RobotServer; class TrajectoryDrawing : public Drawing { public: TrajectoryDrawing(const RobotServer * owner); virtual void Draw(); private:<|fim▁hole|> const RobotServer * m_owner; }; } #endif // NPM_TRAJECTORYDRAWING_HPP<|fim▁end|>
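The hpp row likewise completes with nothing; the suffix after private: already declares the class's single data member, so the joined class body is:

class TrajectoryDrawing : public Drawing {
public:
  TrajectoryDrawing(const RobotServer * owner);
  virtual void Draw();
private:
  const RobotServer * m_owner;
};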
<|file_name|>common.py<|end_file_name|><|fim▁begin|>from django.utils.encoding import force_text import re from django.utils import six from ginger import serializer from jinja2 import Markup __all__ = ['html_json', 'html_attrs', "Element", "CssClassList", "CssStyle", 'add_css_class', 'empty'] def html_json(values): content = serializer.encode(values) try: content = content.encode("unicode-escape") except LookupError: content = content.encode("string-escape") return Markup(content) def html_attrs(*args, **kwargs): attr = HtmlAttr() attr.update(*args, **kwargs) return six.text_type(attr) def add_css_class(original_class, *css_classes): css = CssClassList() css.append(original_class) css.append(css_classes) return six.text_type(css) class CssClassList(object): def __init__(self): self.classes = [] def __iter__(self): return iter(self.classes) def __len__(self): return len(self.classes) def copy(self): value = CssClassList() value.classes.extend(self.classes) return value def append(self, value): if isinstance(value, six.text_type): value = re.sub(r'\s+', ' ', value.strip()) if len(value) == 1: value = value[0] if isinstance(value, (tuple, list)): for val in value: self.append(val) else: if value not in self.classes: self.classes.append(value) def __contains__(self, item): return item in self.classes def __str__(self): return " ".join(str(c) for c in self.classes if c) class CssStyle(dict): def render(self): return ";".join("%s:%s" % (key.replace("_", "-"), value) for (key, value) in six.iteritems(self)) def __str__(self): return self.render() def copy(self): return CssStyle(super(CssStyle, self).copy()) def _normalize(key): if key.endswith("_"): key = key[:-1] key = key.replace("__", ":").replace("_", "-") return key class HtmlAttr(object): def __init__(self): self.attrs = {} self.styles = CssStyle() self.classes = CssClassList() def copy(self): attr = HtmlAttr() attr.attrs = self.attrs.copy() attr.styles = self.styles.copy() attr.classes = self.classes.copy() return attr def dict(self): return dict(self) def __setitem__(self, key, value): self.set(key, value) def __getitem__(self, item): return dict(self)[item] def __len__(self): return len(dict(self)) def get(self, key): return dict(self).get(key) def set(self, key, value): key = _normalize(key) if key in {"class"}: self.classes.append(value) elif key == "style": self.styles.update(value) else: self.attrs[key] = value def update(self, *args, **attrs): values = {} values.update(*args, **attrs) for k, v in values.items(): self.set(k, v) def __iter__(self): for k, v in six.iteritems(self.attrs): yield k, v if self.classes: yield "class", six.text_type(self.classes) if self.styles: yield "style", self.styles.render() def render(self): pairs = [] for key, value in self: if value is None or value is False: continue if value is True: pairs.append(key) else: if not isinstance(value, six.string_types): value = html_json(value) pairs.append("%s='%s'" % (key, str(value))) return " ".join(pairs) def __str__(self): return self.render() class Element(object): def __init__(self, tag): self.tag = tag self.attrib = HtmlAttr()<|fim▁hole|> el.attrib.update(kwargs) return el def __getitem__(self, item): el = self.copy() if not isinstance(item, (list, tuple)): item = [item] for c in item: el.append(c) return el def copy(self): el = self.__class__(self.tag) el.attrib = self.attrib.copy() el.children = self.children[:] return el def mutate(self, tag): el = tag.copy() el.attrib.update(self.attrib.copy()) el.children = self.children[:] return el def append(self, 
child): if child is None: return if isinstance(child, (list, tuple)): for c in child: self.append(c) else: self.children.append(child) def convert_to_text(self, el, *args, **kwargs): return el.render(*args, **kwargs) if hasattr(el, 'render') else force_text(el) def render_children(self, *args, **kwargs): return "".join(filter(None, (self.convert_to_text(c, *args, **kwargs)for c in self.children))) def render(self, ctx=None): if self.attrib.get('if') is False: return None attrs = self.attrib content = self.render_children(ctx) tag = _normalize(self.tag) return u"<{tag} {attrs}>{content}</{tag}>".format(**locals()) def __str__(self): return self.render() def __html__(self): return self.render() class Empty(Element): def render(self, *args, **kwargs): return self.render_children(*args, **kwargs) empty = Empty("none") for name in "html body link meta div span form section article aside main ul li ol dl dd dt p a strong "\ "i fieldset legend b em input select button label nav textarea " \ "table tbody tfoot thead tr td th figure caption img".split(" "): __all__.append(name) globals()[name] = Element(name) if __name__ == '__main__': print(input(type="radio", checked=False).render())<|fim▁end|>
        self.children = []

    def __call__(self, **kwargs):
        el = self.copy()
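Reassembled, the completion closes __init__ and opens __call__, whose body resumes in the prompt's suffix (el.attrib.update onward); whitespace is approximate:

class Element(object):
    def __init__(self, tag):
        self.tag = tag
        self.attrib = HtmlAttr()
        self.children = []          # completion

    def __call__(self, **kwargs):   # completion
        el = self.copy()            # completion
        el.attrib.update(kwargs)    # suffix resumes here
        return el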
<|file_name|>StockageException.java<|end_file_name|><|fim▁begin|>package fr.pizzeria.exception; public class StockageException extends Exception { public StockageException() { super(); } <|fim▁hole|> super(message, cause); } public StockageException(String message) { super(message); } public StockageException(Throwable cause) { super(cause); } }<|fim▁end|>
public StockageException(String message, Throwable cause) {
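The completion here is just the constructor header; its super call and closing brace come from the suffix. The joined exception class, reformatted:

public class StockageException extends Exception {

    public StockageException() {
        super();
    }

    public StockageException(String message, Throwable cause) { // completion
        super(message, cause);
    }

    public StockageException(String message) {
        super(message);
    }

    public StockageException(Throwable cause) {
        super(cause);
    }
}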
<|file_name|>messages.go<|end_file_name|><|fim▁begin|>// The goal of this package is to integrate structured logging an metrics // reporting with error handling in an interface as close as possible to the // fluency of fmt.Errorf(...) // or of errors.Wrapf(err, "fmt", args...) // Concerns: // a. structured logging using a defined scheme // b. build-time checking of errors // c. 3 purposes, which each message type can make use of 1-3 of: // logging to ELK, // metrics collection // error reporting // d. Contextualization - i.e. pull message fields from a context.Context // or from a logging context likewise contextualized. // e. ELK specific fields (i.e. "this is schema xyz") // Nice to have: // z. Output filtering disjoint from creation (i.e. *not* log.debug but rather debug stuff from the singularity API) // y. Runtime output filtering, via e.g. HTTP requests. // x. A live ringbuffer of all messages // b & d are in tension. // also, a with OTLs, because optional fields package logging // go get github.com/opentable/go-loglov3-gen (private repo) //go:generate go-loglov3-gen -loglov3-dir $LOGLOV3_DIR -output-dir . import ( "bytes" "fmt" "io" "os" "strings" "time" ) type ( messageSink interface { } /* Counter interface { Clear() Inc(int64) Dec(int64) } // Timer is a write-only interface over a timer. Timer interface { Time(func()) Update(time.Duration) UpdateSince(time.Time) } // Updater is a generalization of write-only metrics - integers that can be set. // e.g. simple gauges or analyzed samples etc. Updater interface { Update(int64) } */ // A LogSink can be used in Deliver to send messages for logging. LogSink interface { // Child returns a namespaced child, with a set of EachFielders for context. Child(name string, context ...EachFielder) LogSink // Fields is used to record the name/value fields of a structured message. Fields([]EachFielder) // Metrics returns a MetricsSink, which will be used to record MetricsMessages. Metrics() MetricsSink // Console returns a WriteDoner, which will be used to record ConsoleMessages. Console() WriteDoner // ExtraConsole returns a WriteDoner, which will be used to record ExtraConsoleMessages. ExtraConsole() WriteDoner // AtExit() does last-minute cleanup of stuff AtExit() // ForceDefer is used during testing to suspend the "panic during testing" behavior. ForceDefer() bool } // A MetricsSink is passed into a MetricsMessage's MetricsTo(), so that the // it can record its metrics. Once done, the Done method is called - if the // metrics are incomplete or insistent, the MetricsSink can then report // errors. // xxx this facility is preliminary, and Sous doesn't yet record these errors. MetricsSink interface { ClearCounter(name string) IncCounter(name string, amount int64) DecCounter(name string, amount int64) UpdateTimer(name string, dur time.Duration) UpdateTimerSince(name string, time time.Time) UpdateSample(name string, value int64) Done() } // WriteDoner is like a WriteCloser, but the Done message also asserts that something useful was written // After a console message has been written, the Done method is called, so // that the WriteDoner can report about badly formed or missing console // messages. // xxx this facility is preliminary, and Sous doesn't yet record these errors. WriteDoner interface { io.Writer Done() } writeDoner struct { io.Writer } // A EachFielder provides EachField - which calls its argument for each field it wants to submit for logging. 
EachFielder interface { EachField(fn FieldReportFn) } // A LevelRecommender can recommend a log level. LevelRecommender interface { RecommendedLevel() Level } // Submessage is not a complete message on its own Submessage interface { EachFielder LevelRecommender } // OldLogMessage captures a deprecated interface // prefer instead to use EachFielder and include Severity and Message fields. // Don't do both though; make a clean break with this interface. OldLogMessage interface { // The severity level of this message, potentially (in the future) manipulated // by dynamic rules. DefaultLevel() Level // A simple textual message describing the logged event. Usually hardcoded (or almost so.) Message() string } // A LogMessage has structured data to report to the structured log server (c.f. Deliver). // Almost every implementation of LogMessage should include a CallerInfo. LogMessage interface { OldLogMessage // Called to report the individual fields for this message. EachField(fn FieldReportFn) } // A MetricsMessage has metrics data to record (c.f. Deliver) MetricsMessage interface { MetricsTo(MetricsSink) } // A ConsoleMessage has messages to report to a local human operator (c.f. Deliver) ConsoleMessage interface { WriteToConsole(console io.Writer) } // FieldReportFn is used by LogMessages to report their fields. FieldReportFn func(FieldName, interface{}) // A MessageField is a quick wrapper for string with EachField. MessageField string kv struct { k FieldName v interface{} } // ToConsole allows quick creation of Console messages. ToConsole struct { msg interface{} } consoleMessage struct { ToConsole } ) // All calls EachField on each of the arguments. func (frf FieldReportFn) All(efs ...EachFielder) { for _, ef := range efs { ef.EachField(frf) } } // KV creates a single-entry EachFielder with the FieldName as the name. func KV(n FieldName, v interface{}) EachFielder { return kv{k: n, v: v} } func (p kv) EachField(fn FieldReportFn) { fn(p.k, p.v) } // EachField implements EachFielder on MessageField. func (m MessageField) EachField(fn FieldReportFn) { fn(CallStackMessage, string(m)) } func (m MessageField) String() string { return string(m) } // WriteToConsole implements ConsoleMessage on ToConsole. func (tc ToConsole) WriteToConsole(c io.Writer) { fmt.Fprintf(c, fmt.Sprintf("%s\n", tc.msg)) } // Console marks a string as being suitable for console output. func Console(m interface{}) ToConsole { return ToConsole{msg: m} } // ConsoleAndMessage wraps a string such that it will be both a console output and the primary message of a log entry. func ConsoleAndMessage(m interface{}) consoleMessage { return consoleMessage{ToConsole{msg: m}} } func (m consoleMessage) EachField(fn FieldReportFn) { fn(CallStackMessage, fmt.Sprintf("%s", m.msg)) } /* A static analysis approach here would: Check that the JSON tags on structs matched the schemas they claim. Check that schema-required fields tie with params to the contructor. Maybe check that contexted messages were always receiving contexts with the right WithValues A code generation approach would: Take the schemas and produce structs with JSON tags Produce constructors for the structs with the required fields. Produce LogXXX methods and functions around those constructors. We can live without those, probably, if we build the interfaces *as if*... */ func nopDoner(w io.Writer) WriteDoner { return &writeDoner{w} } func (writeDoner) Done() {} // Deliver is the core of the logging messages design. 
// // The message argument may implement // any of LogMessage, MetricsMessage or ConsoleMessage, and the // data contained in the message will be dispatched appropriately. // // Furthermore, messages that don't implement any of those interfaces, // or which panic when operated upon, // themselves generate a well-tested message so that they can be caught and fixed. // // The upshot is that messages can be Delivered on the spot and // later determine what facilities are appropriate. func Deliver(logger LogSink, messages ...interface{}) { if logger == nil { panic("null logger") } //determine if function running under test, allow overwritten value from options functions testFlag := strings.HasSuffix(os.Args[0], ".test") if logger.ForceDefer() { testFlag = false } if !testFlag { defer loggingPanicsShouldntCrashTheApp(logger, messages) } items := partitionItems(messages) logger.Fields(items.eachFielders) metrics := logger.Metrics() for _, mm := range items.metricsMessages { mm.MetricsTo(metrics) } metrics.Done() for _, cm := range items.consoleMessages { cm.WriteToConsole(logger.Console()) } if _, dont := messages[0].(*silentMessageError); items.silent() && !dont { reportSilentMessage(logger, messages) } } type partitionedItems struct { eachFielders []EachFielder consoleMessages []ConsoleMessage metricsMessages []MetricsMessage } // holding item partitioning to logging time. func partitionItems(items []interface{}) partitionedItems { l := partitionedItems{} others := []interface{}{} for _, item := range items { ef, isef := item.(EachFielder) olm, isolm := item.(OldLogMessage) cm, iscm := item.(ConsoleMessage) mm, ismm := item.(MetricsMessage) if isef { if isolm { m := olm.Message() lvl := olm.DefaultLevel() l.eachFielders = append(l.eachFielders, MessageField(m), lvl) } l.eachFielders = append(l.eachFielders, ef) } if iscm { l.consoleMessages = append(l.consoleMessages, cm) } if ismm { l.metricsMessages = append(l.metricsMessages, mm) } if !(isef || iscm || ismm) { others = append(others, item) } } if !l.silent() && len(others) > 0 { l.eachFielders = append(l.eachFielders, assembleStrayFields(others...)) } return l } func (i partitionedItems) silent() bool { if len(i.eachFielders) > 0 { return false } if len(i.consoleMessages) > 0 { return false } if len(i.metricsMessages) > 0 { return false } return true } // a fake "message" designed to trigger the well-tested silentMessageError type loggingPanicFakeMessage struct { broken interface{} } // granted that logging can be set up in the first place, // problems with a logging message should not crash the whole app // therefore: recover the panic do the simplest thing that will be logged, func loggingPanicsShouldntCrashTheApp(ls LogSink, msg interface{}) { if rec := recover(); rec != nil { Deliver(ls, loggingPanicFakeMessage{msg}) } } // ClearCounter implements part of LogSink on LogSet func (ls LogSet) ClearCounter(name string) { ls.GetCounter(name).Clear() } <|fim▁hole|>} // DecCounter implements part of LogSink on LogSet func (ls LogSet) DecCounter(name string, amount int64) { ls.GetCounter(name).Dec(amount) } // UpdateTimer implements part of LogSink on LogSet func (ls LogSet) UpdateTimer(name string, dur time.Duration) { ls.GetTimer(name).Update(dur) } // UpdateTimerSince implements part of LogSink on LogSet func (ls LogSet) UpdateTimerSince(name string, time time.Time) { ls.GetTimer(name).UpdateSince(time) } // UpdateSample implements part of LogSink on LogSet func (ls LogSet) UpdateSample(name string, value int64) { 
ls.GetUpdater(name).Update(value) } // The plan here is to be able to extend this behavior such that e.g. the rules // for levels of messages can be configured or updated at runtime. func getLevel(lm LogMessage) Level { return lm.DefaultLevel() } // ConsoleError receives a ConsoleMessage and returns the string as it would be printed to the console. // This can be used to implement the error interface on ConsoleMessages func ConsoleError(msg ConsoleMessage) string { buf := &bytes.Buffer{} msg.WriteToConsole(buf) return buf.String() } // DefaultLevel is a convenience - by embedding a Level, a message can partially implement LogMessage func (lvl Level) DefaultLevel() Level { return lvl } // EachField implements EachFielder on OTLName. func (n OTLName) EachField(f FieldReportFn) { f(Loglov3Otl, n) }<|fim▁end|>
// IncCounter implements part of LogSink on LogSet func (ls LogSet) IncCounter(name string, amount int64) { ls.GetCounter(name).Inc(amount)
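For reference, the dispatch in Deliver/partitionItems above is a partition-by-interface pattern: each item is type-asserted against every message interface and may land in several buckets at once. The following self-contained Go sketch illustrates the same pattern with hypothetical stand-in interfaces — ConsoleMessage and MetricsMessage here are simplified, not the library's actual definitions:

package main

import "fmt"

// Hypothetical stand-ins for the interfaces in the excerpt above; the real
// library's EachFielder, ConsoleMessage and MetricsMessage have richer shapes.
type ConsoleMessage interface{ Console() string }
type MetricsMessage interface{ Metric() (string, int64) }

type partitioned struct {
	console []ConsoleMessage
	metrics []MetricsMessage
	strays  []interface{}
}

// partition sorts items by the interfaces they satisfy; one item may land in
// several buckets, mirroring partitionItems above.
func partition(items ...interface{}) partitioned {
	var p partitioned
	for _, item := range items {
		matched := false
		if cm, ok := item.(ConsoleMessage); ok {
			p.console = append(p.console, cm)
			matched = true
		}
		if mm, ok := item.(MetricsMessage); ok {
			p.metrics = append(p.metrics, mm)
			matched = true
		}
		if !matched {
			p.strays = append(p.strays, item) // becomes a "stray fields" message
		}
	}
	return p
}

type hit string

func (h hit) Console() string         { return "hit: " + string(h) }
func (h hit) Metric() (string, int64) { return "hits", 1 }

func main() {
	p := partition(hit("/index"), 42) // 42 matches nothing and becomes a stray
	fmt.Println(len(p.console), len(p.metrics), len(p.strays)) // 1 1 1
}

Because the assertions are independent ifs rather than a type switch, one value can feed the console, the metrics sink, and the structured fields at the same time, which is what Deliver relies on.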
<|file_name|>coerce-unsafe-to-closure.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn main() { let x: Option<&[u8]> = Some("foo").map(std::mem::transmute); //~^ ERROR: mismatched types }<|fim▁end|>
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main import ( "net/http" "os" "github.com/sim4life/go_hero/Godeps/_workspace/src/github.com/russross/blackfriday" ) func GenerateMarkdown(rw http.ResponseWriter, r *http.Request) { markdown := blackfriday.MarkdownCommon([]byte(r.FormValue("body"))) rw.Write(markdown) } func main() { port := os.Getenv("PORT")<|fim▁hole|> port = "8080" } http.HandleFunc("/markdown", GenerateMarkdown) http.Handle("/", http.FileServer(http.Dir("public"))) http.ListenAndServe(":"+port, nil) }<|fim▁end|>
if port == "" {
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- ## # myapp.urls ## ## # Copyright (C) $YEAR$, $AUTHOR_NAME$ <$AUTHOR_EMAIL$> # # This program is free software: you can redistribute it and/or modify it # under the terms of version 3 of the GNU Affero General Public License as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details.<|fim▁hole|># You should have received a copy of the GNU General Public License along # with this source code; if not, see <http://www.gnu.org/licenses/>, # or write to # # Free Software Foundation, Inc. # 51 Franklin Street, Fifth Floor # Boston, MA 02110-1301 USA ## ## # End of File ##<|fim▁end|>
#
<|file_name|>filesystem.go<|end_file_name|><|fim▁begin|>// Copyright 2016 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package types import ( "github.com/coreos/ignition/config/shared/errors" "github.com/coreos/ignition/config/validate/report" ) type Filesystem struct { Name string `json:"name,omitempty"` Mount *FilesystemMount `json:"mount,omitempty"` Path *Path `json:"path,omitempty"` } type FilesystemMount struct { Device Path `json:"device,omitempty"` Format FilesystemFormat `json:"format,omitempty"` Create *FilesystemCreate `json:"create,omitempty"` } type FilesystemCreate struct { Force bool `json:"force,omitempty"` Options MkfsOptions `json:"options,omitempty"` } func (f Filesystem) Validate() report.Report { if f.Mount == nil && f.Path == nil { return report.ReportFromError(errors.ErrFilesystemNoMountPath, report.EntryError) }<|fim▁hole|> if f.Mount != nil && f.Path != nil { return report.ReportFromError(errors.ErrFilesystemMountAndPath, report.EntryError) } return report.Report{} } type FilesystemFormat string func (f FilesystemFormat) Validate() report.Report { switch f { case "ext4", "btrfs", "xfs": return report.Report{} default: return report.ReportFromError(errors.ErrFilesystemInvalidFormat, report.EntryError) } } type MkfsOptions []string<|fim▁end|>
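The Validate method above encodes a mutual-exclusion rule: exactly one of Mount or Path must be set. Here is a stripped-down sketch of that rule with simplified stand-in types (not Ignition's real Filesystem, errors, or report packages):

package main

import (
	"errors"
	"fmt"
)

// Simplified stand-ins for the Ignition types above.
type mount struct{ device, format string }

type filesystem struct {
	name  string
	mount *mount
	path  *string
}

// validate mirrors Filesystem.Validate above: exactly one of mount or path
// must be set, so both-nil and both-set are rejected.
func (f filesystem) validate() error {
	switch {
	case f.mount == nil && f.path == nil:
		return errors.New("filesystem needs a mount or a path")
	case f.mount != nil && f.path != nil:
		return errors.New("filesystem cannot have both a mount and a path")
	}
	return nil
}

func main() {
	fmt.Println(filesystem{name: "root"}.validate()) // needs a mount or a path
}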
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main import ( "flag"<|fim▁hole|> "os/signal" "syscall" "time" "go-common/app/job/main/ugcpay/conf" "go-common/app/job/main/ugcpay/server/http" "go-common/app/job/main/ugcpay/service" ecode "go-common/library/ecode/tip" "go-common/library/log" ) func main() { flag.Parse() if err := conf.Init(); err != nil { panic(err) } log.Init(conf.Conf.Log) defer log.Close() log.Info("ugcpay-job start") ecode.Init(conf.Conf.Ecode) svc := service.New(conf.Conf) http.Init(svc) c := make(chan os.Signal, 1) signal.Notify(c, syscall.SIGHUP, syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT) for { s := <-c log.Info("get a signal %s", s.String()) switch s { case syscall.SIGQUIT, syscall.SIGTERM, syscall.SIGINT: svc.Close() log.Info("ugcpay-job exit") time.Sleep(time.Second) return case syscall.SIGHUP: default: return } } }<|fim▁end|>
"os"
<|file_name|>proptest.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python3 class TestClass(object): def foo(): doc = "The foo property." def fget(self): return self._foo def fset(self, value): self._foo = value def fdel(self): del self._foo return locals() foo = property(**foo()) <|fim▁hole|> doc = "The bar property." def fget(self): return self._bar def fset(self, value): self._bar = value def fdel(self): del self._bar return locals() bar = property(**bar()) def __init__(self, foo, bar): self.foo = "foo" self.bar = "bar" def test_method(self, attr): if attr == 1: prop = self.foo else: prop = self.bar print(prop) prop = 'TADA!' tc = TestClass(1,2) print(tc.foo) print(tc.bar) tc.test_method('foo') #print(tc.foo) #print(dir(tc))<|fim▁end|>
def bar():
<|file_name|>feed_parse_extractSweetjamtranslationsCom.py<|end_file_name|><|fim▁begin|>def extractSweetjamtranslationsCom(item): ''' Parser for 'sweetjamtranslations.com' ''' vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title']) if not (chp or vol) or "preview" in item['title'].lower(): return None tagmap = [ ('PRC', 'PRC', 'translated'),<|fim▁hole|> ] for tagname, name, tl_type in tagmap: if tagname in item['tags']: return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False<|fim▁end|>
('Loiterous', 'Loiterous', 'oel'),
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::io::{self, Read, Write}; use std::error::Error; use std::fmt; use std::convert::From; use control::FixedHeader; use control::fixed_header::FixedHeaderError; use control::variable_header::VariableHeaderError; use control::ControlType; use encodable::StringEncodeError; use topic_name::TopicNameError; use {Encodable, Decodable}; pub use self::connect::ConnectPacket; pub use self::connack::ConnackPacket; pub use self::publish::PublishPacket; pub use self::puback::PubackPacket; pub use self::pubrec::PubrecPacket; pub use self::pubrel::PubrelPacket; pub use self::pubcomp::PubcompPacket; pub use self::pingreq::PingreqPacket; pub use self::pingresp::PingrespPacket; pub use self::disconnect::DisconnectPacket; pub use self::subscribe::SubscribePacket; pub use self::suback::SubackPacket; pub use self::unsuback::UnsubackPacket; pub use self::unsubscribe::UnsubscribePacket; pub use self::publish::QoSWithPacketIdentifier; pub mod connect; pub mod connack; pub mod publish; pub mod puback; pub mod pubrec; pub mod pubrel; pub mod pubcomp; pub mod pingreq; pub mod pingresp; pub mod disconnect; pub mod subscribe; pub mod suback; pub mod unsuback; pub mod unsubscribe; pub trait Packet<'a>: Sized { type Payload: Encodable<'a> + Decodable<'a> + 'a; fn fixed_header(&self) -> &FixedHeader; fn payload(&self) -> &Self::Payload; fn encode_variable_headers<W: Write>(&self, writer: &mut W) -> Result<(), PacketError<'a, Self>>; fn encoded_variable_headers_length(&self) -> u32; fn decode_packet<R: Read>(reader: &mut R, fixed_header: FixedHeader) -> Result<Self, PacketError<'a, Self>>; } impl<'a, T: Packet<'a> + fmt::Debug + 'a> Encodable<'a> for T { type Err = PacketError<'a, T>; fn encode<W: Write>(&self, writer: &mut W) -> Result<(), PacketError<'a, T>> { try!(self.fixed_header().encode(writer)); try!(self.encode_variable_headers(writer)); self.payload().encode(writer).map_err(PacketError::PayloadError) } fn encoded_length(&self) -> u32 { self.fixed_header().encoded_length() + self.encoded_variable_headers_length() + self.payload().encoded_length() } } impl<'a, T: Packet<'a> + fmt::Debug + 'a> Decodable<'a> for T { type Err = PacketError<'a, T>; type Cond = FixedHeader; fn decode_with<R: Read>(reader: &mut R, fixed_header: Option<FixedHeader>) -> Result<Self, PacketError<'a, Self>> { let fixed_header: FixedHeader = if let Some(hdr) = fixed_header { hdr } else { try!(Decodable::decode(reader)) }; <Self as Packet>::decode_packet(reader, fixed_header) } } #[derive(Debug)] pub enum PacketError<'a, T: Packet<'a>> { FixedHeaderError(FixedHeaderError), VariableHeaderError(VariableHeaderError), PayloadError(<<T as Packet<'a>>::Payload as Encodable<'a>>::Err), MalformedPacket(String), StringEncodeError(StringEncodeError), IoError(io::Error), TopicNameError(TopicNameError), } impl<'a, T: Packet<'a>> fmt::Display for PacketError<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &PacketError::FixedHeaderError(ref err) => err.fmt(f), &PacketError::VariableHeaderError(ref err) => err.fmt(f), &PacketError::PayloadError(ref err) => err.fmt(f), &PacketError::MalformedPacket(ref err) => err.fmt(f), &PacketError::StringEncodeError(ref err) => err.fmt(f), &PacketError::IoError(ref err) => err.fmt(f), &PacketError::TopicNameError(ref err) => err.fmt(f), } } } impl<'a, T: Packet<'a> + fmt::Debug> Error for PacketError<'a, T> { fn description(&self) -> &str { match self { &PacketError::FixedHeaderError(ref err) => err.description(), 
&PacketError::VariableHeaderError(ref err) => err.description(), &PacketError::PayloadError(ref err) => err.description(), &PacketError::MalformedPacket(ref err) => &err[..], &PacketError::StringEncodeError(ref err) => err.description(), &PacketError::IoError(ref err) => err.description(), &PacketError::TopicNameError(ref err) => err.description(), } } fn cause(&self) -> Option<&Error> { match self { &PacketError::FixedHeaderError(ref err) => Some(err), &PacketError::VariableHeaderError(ref err) => Some(err), &PacketError::PayloadError(ref err) => Some(err), &PacketError::MalformedPacket(..) => None, &PacketError::StringEncodeError(ref err) => Some(err), &PacketError::IoError(ref err) => Some(err), &PacketError::TopicNameError(ref err) => Some(err), } } } impl<'a, T: Packet<'a>> From<FixedHeaderError> for PacketError<'a, T> { fn from(err: FixedHeaderError) -> PacketError<'a, T> { PacketError::FixedHeaderError(err) } } impl<'a, T: Packet<'a>> From<VariableHeaderError> for PacketError<'a, T> { fn from(err: VariableHeaderError) -> PacketError<'a, T> { PacketError::VariableHeaderError(err) } } impl<'a, T: Packet<'a>> From<io::Error> for PacketError<'a, T> { fn from(err: io::Error) -> PacketError<'a, T> { PacketError::IoError(err) } } impl<'a, T: Packet<'a>> From<StringEncodeError> for PacketError<'a, T> { fn from(err: StringEncodeError) -> PacketError<'a, T> { PacketError::StringEncodeError(err) } } impl<'a, T: Packet<'a>> From<TopicNameError> for PacketError<'a, T> { fn from(err: TopicNameError) -> PacketError<'a, T> { PacketError::TopicNameError(err) } } macro_rules! impl_variable_packet { ($($name:ident & $errname:ident => $hdr:ident,)+) => { #[derive(Debug, Eq, PartialEq)] pub enum VariablePacket { $( $name($name), )+ } $( impl From<$name> for VariablePacket { fn from(pk: $name) -> VariablePacket { VariablePacket::$name(pk) } } )+ impl<'a> Encodable<'a> for VariablePacket { type Err = VariablePacketError<'a>; fn encode<W: Write>(&self, writer: &mut W) -> Result<(), VariablePacketError<'a>> { match self { $( &VariablePacket::$name(ref pk) => pk.encode(writer).map_err(From::from), )+ } } fn encoded_length(&self) -> u32 { match self { $( &VariablePacket::$name(ref pk) => pk.encoded_length(), )+ } } } impl<'a> Decodable<'a> for VariablePacket { type Err = VariablePacketError<'a>; type Cond = FixedHeader; fn decode_with<R: Read>(reader: &mut R, fixed_header: Option<FixedHeader>) -> Result<VariablePacket, Self::Err> { let fixed_header = match fixed_header { Some(fh) => fh, None => { match FixedHeader::decode(reader) { Ok(header) => header, Err(FixedHeaderError::Unrecognized(code, length)) => { let reader = &mut reader.take(length as u64); let mut buf = Vec::with_capacity(length as usize); try!(reader.read_to_end(&mut buf)); return Err(VariablePacketError::UnrecognizedPacket(code, buf)); }, Err(FixedHeaderError::ReservedType(code, length)) => { let reader = &mut reader.take(length as u64); let mut buf = Vec::with_capacity(length as usize); try!(reader.read_to_end(&mut buf)); return Err(VariablePacketError::ReservedPacket(code, buf)); }, Err(err) => return Err(From::from(err)) } } }; let reader = &mut reader.take(fixed_header.remaining_length as u64); match fixed_header.packet_type.control_type { $( ControlType::$hdr => { let pk = try!(<$name as Packet<'a>>::decode_packet(reader, fixed_header)); Ok(VariablePacket::$name(pk)) } )+ } } } #[derive(Debug)] pub enum VariablePacketError<'a> { FixedHeaderError(FixedHeaderError), UnrecognizedPacket(u8, Vec<u8>), ReservedPacket(u8, Vec<u8>), 
IoError(io::Error), $( $errname(PacketError<'a, $name>), )+ } impl<'a> From<FixedHeaderError> for VariablePacketError<'a> { fn from(err: FixedHeaderError) -> VariablePacketError<'a> { VariablePacketError::FixedHeaderError(err) } } impl<'a> From<io::Error> for VariablePacketError<'a> { fn from(err: io::Error) -> VariablePacketError<'a> { VariablePacketError::IoError(err) } } $( impl<'a> From<PacketError<'a, $name>> for VariablePacketError<'a> { fn from(err: PacketError<'a, $name>) -> VariablePacketError<'a> { VariablePacketError::$errname(err)<|fim▁hole|> } } )+ impl<'a> fmt::Display for VariablePacketError<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &VariablePacketError::FixedHeaderError(ref err) => err.fmt(f), &VariablePacketError::UnrecognizedPacket(ref code, ref v) => write!(f, "Unrecognized type ({}), [u8, ..{}]", code, v.len()), &VariablePacketError::ReservedPacket(ref code, ref v) => write!(f, "Reserved type ({}), [u8, ..{}]", code, v.len()), &VariablePacketError::IoError(ref err) => err.fmt(f), $( &VariablePacketError::$errname(ref err) => err.fmt(f), )+ } } } impl<'a> Error for VariablePacketError<'a> { fn description(&self) -> &str { match self { &VariablePacketError::FixedHeaderError(ref err) => err.description(), &VariablePacketError::UnrecognizedPacket(..) => "Unrecognized packet", &VariablePacketError::ReservedPacket(..) => "Reserved packet", &VariablePacketError::IoError(ref err) => err.description(), $( &VariablePacketError::$errname(ref err) => err.description(), )+ } } fn cause(&self) -> Option<&Error> { match self { &VariablePacketError::FixedHeaderError(ref err) => Some(err), &VariablePacketError::UnrecognizedPacket(..) => None, &VariablePacketError::ReservedPacket(..) => None, &VariablePacketError::IoError(ref err) => Some(err), $( &VariablePacketError::$errname(ref err) => Some(err), )+ } } } } } impl_variable_packet! { ConnectPacket & ConnectPacketError => Connect, ConnackPacket & ConnackPacketError => ConnectAcknowledgement, PublishPacket & PublishPacketError => Publish, PubackPacket & PubackPacketError => PublishAcknowledgement, PubrecPacket & PubrecPacketError => PublishReceived, PubrelPacket & PubrelPacketError => PublishRelease, PubcompPacket & PubcompPacketError => PublishComplete, PingreqPacket & PingreqPacketError => PingRequest, PingrespPacket & PingrespPacketError => PingResponse, SubscribePacket & SubscribePacketError => Subscribe, SubackPacket & SubackPacketError => SubscribeAcknowledgement, UnsubscribePacket & UnsubscribePacketError => Unsubscribe, UnsubackPacket & UnsubackPacketError => UnsubscribeAcknowledgement, DisconnectPacket & DisconnectPacketError => Disconnect, } impl VariablePacket { pub fn new<T>(t: T) -> VariablePacket where VariablePacket: From<T> { From::from(t) } } #[cfg(test)] mod test { use super::*; use std::io::Cursor; use {Encodable, Decodable}; #[test] fn test_variable_packet_basic() { let packet = ConnectPacket::new("1234".to_owned()); // Wrap it let var_packet = VariablePacket::new(packet); // Encode let mut buf = Vec::new(); var_packet.encode(&mut buf).unwrap(); // Decode let mut decode_buf = Cursor::new(buf); let decoded_packet = VariablePacket::decode(&mut decode_buf).unwrap(); assert_eq!(var_packet, decoded_packet); } }<|fim▁end|>
<|file_name|>aio_dio_bugs.py<|end_file_name|><|fim▁begin|>import os from autotest.client import test, utils # tests is a simple array of "cmd" "arguments" tests = [["aio-dio-invalidate-failure", "poo"], ["aio-dio-subblock-eof-read", "eoftest"], ["aio-free-ring-with-bogus-nr-pages", ""], ["aio-io-setup-with-nonwritable-context-pointer", ""], ["aio-dio-extend-stat", "file"], ] name = 0 arglist = 1 class aio_dio_bugs(test.test): version = 5 preserve_srcdir = True def initialize(self): self.job.require_gcc() self.job.setup_dep(['libaio']) ldflags = '-L ' + self.autodir + '/deps/libaio/lib' cflags = '-I ' + self.autodir + '/deps/libaio/include' self.gcc_flags = ldflags + ' ' + cflags <|fim▁hole|> utils.make('"CFLAGS=' + self.gcc_flags + '"') def execute(self, args = ''): os.chdir(self.tmpdir) libs = self.autodir + '/deps/libaio/lib/' ld_path = utils.prepend_path(libs, utils.environ('LD_LIBRARY_PATH')) var_ld_path = 'LD_LIBRARY_PATH=' + ld_path for test in tests: cmd = self.srcdir + '/' + test[name] + ' ' + args + ' ' \ + test[arglist] utils.system(var_ld_path + ' ' + cmd)<|fim▁end|>
def setup(self): os.chdir(self.srcdir)
<|file_name|>reflect.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /*! Runtime type reflection */ #[allow(missing_doc)]; #[cfg(stage0)] use intrinsic::{Opaque, TyDesc, TyVisitor}; #[cfg(not(stage0))] use unstable::intrinsics::{Opaque, TyDesc, TyVisitor}; use libc::c_void; use sys; use vec; /** * Trait for visitor that wishes to reflect on data. To use this, create a * struct that encapsulates the set of pointers you wish to walk through a * data structure, and implement both `MovePtr` for it as well as `TyVisitor`; * then build a MovePtrAdaptor wrapped around your struct. */ pub trait MovePtr { fn move_ptr(&self, adjustment: &fn(*c_void) -> *c_void); fn push_ptr(&self); fn pop_ptr(&self); } /// Helper function for alignment calculation. #[inline] pub fn align(size: uint, align: uint) -> uint { ((size + align) - 1u) & !(align - 1u) } /// Adaptor to wrap around visitors implementing MovePtr. pub struct MovePtrAdaptor<V> { inner: V } pub fn MovePtrAdaptor<V:TyVisitor + MovePtr>(v: V) -> MovePtrAdaptor<V> { MovePtrAdaptor { inner: v } } impl<V:TyVisitor + MovePtr> MovePtrAdaptor<V> { #[inline] pub fn bump(&self, sz: uint) { do self.inner.move_ptr() |p| { ((p as uint) + sz) as *c_void }; } #[inline] pub fn align(&self, a: uint) { do self.inner.move_ptr() |p| { align(p as uint, a) as *c_void }; } #[inline] pub fn align_to<T>(&self) { self.align(sys::min_align_of::<T>()); } #[inline] pub fn bump_past<T>(&self) { self.bump(sys::size_of::<T>()); } } /// Abstract type-directed pointer-movement using the MovePtr trait impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> { fn visit_bot(&self) -> bool { self.align_to::<()>(); if ! self.inner.visit_bot() { return false; } self.bump_past::<()>(); true } fn visit_nil(&self) -> bool { self.align_to::<()>(); if ! self.inner.visit_nil() { return false; } self.bump_past::<()>(); true } fn visit_bool(&self) -> bool { self.align_to::<bool>(); if ! self.inner.visit_bool() { return false; } self.bump_past::<bool>(); true } fn visit_int(&self) -> bool { self.align_to::<int>(); if ! self.inner.visit_int() { return false; } self.bump_past::<int>(); true } fn visit_i8(&self) -> bool { self.align_to::<i8>(); if ! self.inner.visit_i8() { return false; } self.bump_past::<i8>(); true } fn visit_i16(&self) -> bool { self.align_to::<i16>(); if ! self.inner.visit_i16() { return false; } self.bump_past::<i16>(); true } fn visit_i32(&self) -> bool { self.align_to::<i32>(); if ! self.inner.visit_i32() { return false; } self.bump_past::<i32>(); true } fn visit_i64(&self) -> bool { self.align_to::<i64>(); if ! self.inner.visit_i64() { return false; } self.bump_past::<i64>(); true } fn visit_uint(&self) -> bool { self.align_to::<uint>(); if ! self.inner.visit_uint() { return false; } self.bump_past::<uint>(); true } fn visit_u8(&self) -> bool { self.align_to::<u8>(); if ! self.inner.visit_u8() { return false; } self.bump_past::<u8>(); true } fn visit_u16(&self) -> bool { self.align_to::<u16>(); if ! self.inner.visit_u16() { return false; } self.bump_past::<u16>(); true } fn visit_u32(&self) -> bool { self.align_to::<u32>(); if ! 
self.inner.visit_u32() { return false; } self.bump_past::<u32>(); true } fn visit_u64(&self) -> bool { self.align_to::<u64>(); if ! self.inner.visit_u64() { return false; } self.bump_past::<u64>(); true } fn visit_float(&self) -> bool { self.align_to::<float>(); if ! self.inner.visit_float() { return false; } self.bump_past::<float>(); true } fn visit_f32(&self) -> bool { self.align_to::<f32>(); if ! self.inner.visit_f32() { return false; } self.bump_past::<f32>(); true } fn visit_f64(&self) -> bool { self.align_to::<f64>(); if ! self.inner.visit_f64() { return false; } self.bump_past::<f64>(); true } fn visit_char(&self) -> bool { self.align_to::<char>(); if ! self.inner.visit_char() { return false; } self.bump_past::<char>(); true } fn visit_str(&self) -> bool {<|fim▁hole|> } fn visit_estr_box(&self) -> bool { self.align_to::<@str>(); if ! self.inner.visit_estr_box() { return false; } self.bump_past::<@str>(); true } fn visit_estr_uniq(&self) -> bool { self.align_to::<~str>(); if ! self.inner.visit_estr_uniq() { return false; } self.bump_past::<~str>(); true } fn visit_estr_slice(&self) -> bool { self.align_to::<&'static str>(); if ! self.inner.visit_estr_slice() { return false; } self.bump_past::<&'static str>(); true } fn visit_estr_fixed(&self, n: uint, sz: uint, align: uint) -> bool { self.align(align); if ! self.inner.visit_estr_fixed(n, sz, align) { return false; } self.bump(sz); true } fn visit_box(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<@u8>(); if ! self.inner.visit_box(mtbl, inner) { return false; } self.bump_past::<@u8>(); true } fn visit_uniq(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<~u8>(); if ! self.inner.visit_uniq(mtbl, inner) { return false; } self.bump_past::<~u8>(); true } #[cfg(not(stage0))] fn visit_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<~u8>(); if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; } self.bump_past::<~u8>(); true } fn visit_ptr(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<*u8>(); if ! self.inner.visit_ptr(mtbl, inner) { return false; } self.bump_past::<*u8>(); true } fn visit_rptr(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<&'static u8>(); if ! self.inner.visit_rptr(mtbl, inner) { return false; } self.bump_past::<&'static u8>(); true } fn visit_unboxed_vec(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<vec::UnboxedVecRepr>(); if ! self.inner.visit_vec(mtbl, inner) { return false; } true } fn visit_vec(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<~[u8]>(); if ! self.inner.visit_vec(mtbl, inner) { return false; } self.bump_past::<~[u8]>(); true } fn visit_evec_box(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<@[u8]>(); if ! self.inner.visit_evec_box(mtbl, inner) { return false; } self.bump_past::<@[u8]>(); true } fn visit_evec_uniq(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<~[u8]>(); if ! self.inner.visit_evec_uniq(mtbl, inner) { return false; } self.bump_past::<~[u8]>(); true } fn visit_evec_slice(&self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<&'static [u8]>(); if ! self.inner.visit_evec_slice(mtbl, inner) { return false; } self.bump_past::<&'static [u8]>(); true } fn visit_evec_fixed(&self, n: uint, sz: uint, align: uint, mtbl: uint, inner: *TyDesc) -> bool { self.align(align); if ! 
self.inner.visit_evec_fixed(n, sz, align, mtbl, inner) { return false; } self.bump(sz); true } fn visit_enter_rec(&self, n_fields: uint, sz: uint, align: uint) -> bool { self.align(align); if ! self.inner.visit_enter_rec(n_fields, sz, align) { return false; } true } fn visit_rec_field(&self, i: uint, name: &str, mtbl: uint, inner: *TyDesc) -> bool { unsafe { self.align((*inner).align); } if ! self.inner.visit_rec_field(i, name, mtbl, inner) { return false; } unsafe { self.bump((*inner).size); } true } fn visit_leave_rec(&self, n_fields: uint, sz: uint, align: uint) -> bool { if ! self.inner.visit_leave_rec(n_fields, sz, align) { return false; } true } fn visit_enter_class(&self, n_fields: uint, sz: uint, align: uint) -> bool { self.align(align); if ! self.inner.visit_enter_class(n_fields, sz, align) { return false; } true } fn visit_class_field(&self, i: uint, name: &str, mtbl: uint, inner: *TyDesc) -> bool { unsafe { self.align((*inner).align); } if ! self.inner.visit_class_field(i, name, mtbl, inner) { return false; } unsafe { self.bump((*inner).size); } true } fn visit_leave_class(&self, n_fields: uint, sz: uint, align: uint) -> bool { if ! self.inner.visit_leave_class(n_fields, sz, align) { return false; } true } fn visit_enter_tup(&self, n_fields: uint, sz: uint, align: uint) -> bool { self.align(align); if ! self.inner.visit_enter_tup(n_fields, sz, align) { return false; } true } fn visit_tup_field(&self, i: uint, inner: *TyDesc) -> bool { unsafe { self.align((*inner).align); } if ! self.inner.visit_tup_field(i, inner) { return false; } unsafe { self.bump((*inner).size); } true } fn visit_leave_tup(&self, n_fields: uint, sz: uint, align: uint) -> bool { if ! self.inner.visit_leave_tup(n_fields, sz, align) { return false; } true } fn visit_enter_fn(&self, purity: uint, proto: uint, n_inputs: uint, retstyle: uint) -> bool { if ! self.inner.visit_enter_fn(purity, proto, n_inputs, retstyle) { return false } true } fn visit_fn_input(&self, i: uint, mode: uint, inner: *TyDesc) -> bool { if ! self.inner.visit_fn_input(i, mode, inner) { return false; } true } fn visit_fn_output(&self, retstyle: uint, inner: *TyDesc) -> bool { if ! self.inner.visit_fn_output(retstyle, inner) { return false; } true } fn visit_leave_fn(&self, purity: uint, proto: uint, n_inputs: uint, retstyle: uint) -> bool { if ! self.inner.visit_leave_fn(purity, proto, n_inputs, retstyle) { return false; } true } fn visit_enter_enum(&self, n_variants: uint, get_disr: extern unsafe fn(ptr: *Opaque) -> int, sz: uint, align: uint) -> bool { self.align(align); if ! self.inner.visit_enter_enum(n_variants, get_disr, sz, align) { return false; } true } fn visit_enter_enum_variant(&self, variant: uint, disr_val: int, n_fields: uint, name: &str) -> bool { if ! self.inner.visit_enter_enum_variant(variant, disr_val, n_fields, name) { return false; } true } fn visit_enum_variant_field(&self, i: uint, offset: uint, inner: *TyDesc) -> bool { self.inner.push_ptr(); self.bump(offset); if ! self.inner.visit_enum_variant_field(i, offset, inner) { return false; } self.inner.pop_ptr(); true } fn visit_leave_enum_variant(&self, variant: uint, disr_val: int, n_fields: uint, name: &str) -> bool { if ! self.inner.visit_leave_enum_variant(variant, disr_val, n_fields, name) { return false; } true } fn visit_leave_enum(&self, n_variants: uint, get_disr: extern unsafe fn(ptr: *Opaque) -> int, sz: uint, align: uint) -> bool { if ! 
self.inner.visit_leave_enum(n_variants, get_disr, sz, align) { return false; } self.bump(sz); true } fn visit_trait(&self) -> bool { self.align_to::<@TyVisitor>(); if ! self.inner.visit_trait() { return false; } self.bump_past::<@TyVisitor>(); true } fn visit_var(&self) -> bool { if ! self.inner.visit_var() { return false; } true } fn visit_var_integral(&self) -> bool { if ! self.inner.visit_var_integral() { return false; } true } fn visit_param(&self, i: uint) -> bool { if ! self.inner.visit_param(i) { return false; } true } fn visit_self(&self) -> bool { self.align_to::<&'static u8>(); if ! self.inner.visit_self() { return false; } self.align_to::<&'static u8>(); true } fn visit_type(&self) -> bool { if ! self.inner.visit_type() { return false; } true } fn visit_opaque_box(&self) -> bool { self.align_to::<@u8>(); if ! self.inner.visit_opaque_box() { return false; } self.bump_past::<@u8>(); true } fn visit_constr(&self, inner: *TyDesc) -> bool { if ! self.inner.visit_constr(inner) { return false; } true } fn visit_closure_ptr(&self, ck: uint) -> bool { self.align_to::<@fn()>(); if ! self.inner.visit_closure_ptr(ck) { return false; } self.bump_past::<@fn()>(); true } }<|fim▁end|>
self.align_to::<~str>(); if ! self.inner.visit_str() { return false; } self.bump_past::<~str>(); true
<|file_name|>RealisticBot.java<|end_file_name|><|fim▁begin|>package com.stf.bj.app.game.players;

import java.util.Random;

import com.stf.bj.app.game.bj.Spot;
import com.stf.bj.app.game.server.Event;
import com.stf.bj.app.game.server.EventType;
import com.stf.bj.app.settings.AppSettings;
import com.stf.bj.app.strategy.FullStrategy;

public class RealisticBot extends BasicBot {

	private enum InsuranceStrategy {
		NEVER, EVEN_MONEY_ONLY, GOOD_HANDS_ONLY, RARELY, OFTEN;
	}

	private enum PlayAbility {
		PERFECT, NOOB, RANDOM;
	}

	private enum BetChanging {
		NEVER, RANDOM;
	}

	private final InsuranceStrategy insuranceStrategy;
	private final PlayAbility playAbility;
	private final Random r;
	private double wager = -1.0;
	private final BetChanging betChanging;
	private boolean insurancePlay = false;
	private boolean calculatedInsurancePlay = false;
	private boolean messUpNextPlay = false;

	public RealisticBot(AppSettings settings, FullStrategy strategy, Random r, Spot s) {
		super(settings, strategy, r, s);
		if (r == null) {
			r = new Random(System.currentTimeMillis());
		}
		this.r = r;
		insuranceStrategy = InsuranceStrategy.values()[r.nextInt(InsuranceStrategy.values().length)];
		playAbility = PlayAbility.values()[r.nextInt(PlayAbility.values().length)];
		setBaseBet();
		betChanging = BetChanging.RANDOM;
	}

	@Override
	public void sendEvent(Event e) {
		super.sendEvent(e);
		if (e.getType() == EventType.DECK_SHUFFLED) {
			if (betChanging == BetChanging.RANDOM && r.nextInt(2) == 0) {
				wager = 5;
			}
		}
	}

	@Override
	public double getWager() {
		if (delay > 0) {
			delay--;
			return -1;
		}
		return wager;
	}

	private void setBaseBet() {
		int rInt = r.nextInt(12);
		if (rInt < 6) {
			wager = 5.0 * (1 + rInt);
		} else if (rInt < 9) {
			wager = 10.0;
		} else {
			wager = 5.0;
		}
	}

	@Override
	public boolean getInsurancePlay() {
		if (insuranceStrategy == InsuranceStrategy.NEVER) {
			return false;
		}
		if (!calculatedInsurancePlay) {
			insurancePlay = calculateInsurancePlay();
			calculatedInsurancePlay = true;
		}
		return insurancePlay;
	}

	private boolean calculateInsurancePlay() {
		switch (insuranceStrategy) {
		case EVEN_MONEY_ONLY:
			return getHandSoftTotal(0) == 21;
		case GOOD_HANDS_ONLY:
			return getHandSoftTotal(0) > 16 + r.nextInt(3);
		case OFTEN:
			return r.nextInt(2) == 0;
		case RARELY:
			return r.nextInt(5) == 0;
		default:
			throw new IllegalStateException();
		}
	}

	@Override
	public Play getMove(int handIndex, boolean canDouble, boolean canSplit, boolean canSurrender) {
		if (delay > 0) {
			delay--;
			return null;
		}
		Play play = super.getMove(handIndex, canDouble, canSplit, canSurrender);
		if (messUpNextPlay) {
			if (play != Play.SPLIT && canSplit) {
				play = Play.SPLIT;
			} else if (play != Play.DOUBLEDOWN && canDouble) {
				play = Play.DOUBLEDOWN;
			} else if (play == Play.STAND) {
				play = Play.HIT;
			} else {<|fim▁hole|>
play = Play.STAND; } }
<|file_name|>promise.js<|end_file_name|><|fim▁begin|>/** * Copyright 2016 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Returns a Deferred struct, which holds a pending promise and its associated * resolve and reject functions. * * This is preferred instead of creating a Promise instance to extract the * resolve/reject functions yourself: * * ``` * // Avoid doing * let resolve; * const promise = new Promise(res => { * resolve = res; * }); * * // Good * const deferred = new Deferred(); * const { promise, resolve } = deferred; * ``` * * @template T */ export class Deferred { /** * Creates an instance of Deferred. */ constructor() { let resolve, reject; /** * @const {!Promise<T>} */ this.promise = new /*OK*/Promise((res, rej) => { resolve = res; reject = rej; }); /** * @const {function(T=)} */ this.resolve = resolve; /** * @const {function(*=)} */ this.reject = reject; } } /** * Creates a promise resolved to the return value of fn. * If fn sync throws, it will cause the promise to reject. * * @param {function():T} fn * @return {!Promise<T>} * @template T */ export function tryResolve(fn) { return new Promise(resolve => { resolve(fn()); }); } /** * Returns a promise which resolves if a threshold amount of the given promises * resolve, and rejects otherwise. * @param {!Array<!Promise>} promises The array of promises to test. * @param {number} count The number of promises that must resolve for the * returned promise to resolve. * @return {!Promise} A promise that resolves if any of the given promises * resolve, and which rejects otherwise. */ export function some(promises, count = 1) { return new Promise((resolve, reject) => { count = Math.max(count, 0); const extra = promises.length - count; if (extra < 0) { reject(new Error('not enough promises to resolve')); } if (promises.length == 0) { resolve([]); } const values = []; const reasons = []; const onFulfilled = value => { if (values.length < count) { values.push(value); } if (values.length == count) { resolve(values); } }; const onRejected = reason => { if (reasons.length <= extra) { reasons.push(reason); } if (reasons.length > extra) { reject(reasons); } }; for (let i = 0; i < promises.length; i++) { Promise.resolve(promises[i]).then(onFulfilled, onRejected); } }); } /** * Resolves with the result of the last promise added. * @implements {IThenable} */ export class LastAddedResolver { /** * @param {!Array<!Promise>=} opt_promises */ constructor(opt_promises) { let resolve_, reject_; /** @private @const {!Promise} */ this.promise_ = new Promise((resolve, reject) => { resolve_ = resolve; reject_ = reject; }); /** @private */ this.resolve_ = resolve_; /** @private */ this.reject_ = reject_; /** @private */ this.count_ = 0; if (opt_promises) { for (let i = 0; i < opt_promises.length; i++) { this.add(opt_promises[i]); } } } /** * Add a promise to possibly be resolved. 
* @param {!Promise} promise * @return {!Promise} */ add(promise) { const countAtAdd = ++this.count_; Promise.resolve(promise).then(result => { if (this.count_ === countAtAdd) { this.resolve_(result);<|fim▁hole|> // this will only reject when most recently added promise fails. if (this.count_ === countAtAdd) { this.reject_(error); } }); return this.promise_; } /** @override */ then(opt_resolve, opt_reject) { return this.promise_.then(opt_resolve, opt_reject); } }<|fim▁end|>
} }, error => { // Don't follow behavior of Promise.all and Promise.race error so that
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
import SocialShare from './SocialShare' export default SocialShare
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django_google_dork.models import model_utils.fields import django.utils.timezone <|fim▁hole|>class Migration(migrations.Migration): replaces = [('django_google_dork', '0001_initial'), ('django_google_dork', '0002_auto_20141116_1551'), ('django_google_dork', '0003_run_engine')] dependencies = [ ] operations = [ migrations.CreateModel( name='Campaign', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)), ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)), ('name', django_google_dork.models.CampaignNameField(unique=True, max_length=32)), ], options={ 'abstract': False, }, bases=(models.Model,), ), migrations.CreateModel( name='Dork', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)), ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)), ('query', django_google_dork.models.DorkQueryField(max_length=256)), ('campaign', models.ForeignKey(to='django_google_dork.Campaign')), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='Result', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=1024)), ('summary', models.TextField()), ('url', models.URLField(max_length=1024)), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='Run', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created', models.DateTimeField(auto_now_add=True)), ('dork', models.ForeignKey(to='django_google_dork.Dork')), ('result_set', models.ManyToManyField(to='django_google_dork.Result')), ], options={ }, bases=(models.Model,), ), migrations.AlterUniqueTogether( name='result', unique_together=set([('title', 'summary', 'url')]), ), migrations.AlterUniqueTogether( name='dork', unique_together=set([('campaign', 'query')]), ), migrations.CreateModel( name='SearchEngine', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('hostname', models.CharField(unique=True, max_length=32)), ], options={ }, bases=(models.Model,), ), migrations.AddField( model_name='campaign', name='enabled', field=models.BooleanField(default=True), preserve_default=True, ), migrations.AddField( model_name='dork', name='enabled', field=models.BooleanField(default=True), preserve_default=True, ), migrations.AddField( model_name='run', name='engine', field=models.ForeignKey(default=None, to='django_google_dork.SearchEngine'), preserve_default=False, ), ]<|fim▁end|>
<|file_name|>ThumbnailCheckLibrary.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2013 University of Dundee & Open Microscopy Environment. * All rights reserved. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ package org.openmicroscopy.shoola.keywords; import java.awt.Component; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.awt.image.Raster; import java.awt.image.RenderedImage;<|fim▁hole|>import java.util.NoSuchElementException; import javax.swing.JPanel; import org.robotframework.abbot.finder.BasicFinder; import org.robotframework.abbot.finder.ComponentNotFoundException; import org.robotframework.abbot.finder.Matcher; import org.robotframework.abbot.finder.MultipleComponentsFoundException; import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; /** * Robot Framework SwingLibrary keyword library offering methods for checking thumbnails. * @author [email protected] * @since 4.4.9 */ public class ThumbnailCheckLibrary { /** Allow Robot Framework to instantiate this library only once. */ public static final String ROBOT_LIBRARY_SCOPE = "GLOBAL"; /** * An iterator over the integer pixel values of a rendered image, * first increasing <em>x</em>, then <em>y</em> when <em>x</em> wraps back to 0. * This is written so as to be scalable over arbitrary image sizes * and to not cause heap allocations during the iteration. * @author [email protected] * @since 4.4.9 */ private static class IteratorIntPixel { final Raster raster; final int width; final int height; final int[] pixel = new int[1]; int x = 0; int y = 0; /** * Create a new pixel iterator for the given image. * The image is assumed to be of a type that packs data for each pixel into an <code>int</code>. * @param image the image over whose pixels to iterate */ IteratorIntPixel(RenderedImage image) { this.raster = image.getData(); this.width = image.getWidth(); this.height = image.getHeight(); } /** * @return if any pixels remain to be read with {@link #next()} */ boolean hasNext() { return y < height; } /** * @return the next pixel * @throws NoSuchElementException if no more pixels remain */ int next() { if (!hasNext()) { throw new NoSuchElementException(); } raster.getDataElements(x, y, pixel); if (++x == width) { x = 0; ++y; } return pixel[0]; } } /** * Find the thumbnail <code>Component</code> in the AWT hierarchy. 
* @param panelType if the thumbnail should be the whole <code>"image node"</code> or just its <code>"thumbnail"</code> canvas * @param imageFilename the name of the image whose thumbnail is to be rasterized * @return the AWT <code>Component</code> for the thumbnail * @throws MultipleComponentsFoundException if multiple thumbnails are for the given image name * @throws ComponentNotFoundException if no thumbnails are for the given image name */ private static Component componentFinder(final String panelType, final String imageFilename) throws ComponentNotFoundException, MultipleComponentsFoundException { return new BasicFinder().find(new Matcher() { private final String soughtName = panelType + " for " + imageFilename; public boolean matches(Component component) { return component instanceof JPanel && this.soughtName.equals(component.getName()); }}); } /** * Convert the thumbnail for the image of the given filename into rasterized pixel data. * Each pixel is represented by an <code>int</code>. * @param panelType if the thumbnail should be the whole <code>"image node"</code> or just its <code>"thumbnail"</code> canvas * @param imageFilename the name of the image whose thumbnail is to be rasterized * @return the image on the thumbnail * @throws MultipleComponentsFoundException if multiple thumbnails are for the given image name * @throws ComponentNotFoundException if no thumbnails are for the given image name */ private static RenderedImage captureImage(final String panelType, final String imageFilename) throws ComponentNotFoundException, MultipleComponentsFoundException { final JPanel thumbnail = (JPanel) componentFinder(panelType, imageFilename); final int width = thumbnail.getWidth(); final int height = thumbnail.getHeight(); final BufferedImage image = new BufferedImage(width, height, StaticFieldLibrary.IMAGE_TYPE); final Graphics2D graphics = image.createGraphics(); if (graphics == null) { throw new RuntimeException("thumbnail is not displayable"); } thumbnail.paint(graphics); graphics.dispose(); return image; } /** * <table> * <td>Get Thumbnail Border Color</td> * <td>name of image whose thumbnail is queried</td> * </table> * @param imageFilename the name of the image * @return the color of the thumbnail's corner pixel * @throws MultipleComponentsFoundException if multiple thumbnails exist for the given name * @throws ComponentNotFoundException if no thumbnails exist for the given name */ public String getThumbnailBorderColor(String imageFilename) throws ComponentNotFoundException, MultipleComponentsFoundException { final RenderedImage image = captureImage("image node", imageFilename); final IteratorIntPixel pixels = new IteratorIntPixel(image); if (!pixels.hasNext()) { throw new RuntimeException("image node has no pixels"); } return Integer.toHexString(pixels.next()); } /** * <table> * <td>Is Thumbnail Monochromatic</td> * <td>name of image whose thumbnail is queried</td> * </table> * @param imageFilename the name of the image * @return if the image's thumbnail canvas is solidly one color * @throws MultipleComponentsFoundException if multiple thumbnails exist for the given name * @throws ComponentNotFoundException if no thumbnails exist for the given name */ public boolean isThumbnailMonochromatic(String imageFilename) throws ComponentNotFoundException, MultipleComponentsFoundException { final RenderedImage image = captureImage("thumbnail", imageFilename); final IteratorIntPixel pixels = new IteratorIntPixel(image); if (!pixels.hasNext()) { throw new RuntimeException("thumbnail image 
has no pixels"); } final int oneColor = pixels.next(); while (pixels.hasNext()) { if (pixels.next() != oneColor) { return false; } } return true; } /** * <table> * <td>Get Thumbnail Hash</td> * <td>name of image whose thumbnail is queried</td> * </table> * @param imageFilename the name of the image * @return the hash of the thumbnail canvas image * @throws MultipleComponentsFoundException if multiple thumbnails exist for the given name * @throws ComponentNotFoundException if no thumbnails exist for the given name */ public String getThumbnailHash(String imageFilename) throws ComponentNotFoundException, MultipleComponentsFoundException { final RenderedImage image = captureImage("thumbnail", imageFilename); final IteratorIntPixel pixels = new IteratorIntPixel(image); final Hasher hasher = Hashing.goodFastHash(128).newHasher(); while (pixels.hasNext()) { hasher.putInt(pixels.next()); } return hasher.hash().toString(); } /** * <table> * <td>Get Name Of Thumbnail For Image</td> * <td>name of image whose thumbnail is queried</td> * </table> * @param imageFilename the name of the image * @return the return value of the corresponding <code>ThumbnailCanvas.getName()</code> * @throws MultipleComponentsFoundException if multiple thumbnails exist for the given name * @throws ComponentNotFoundException if no thumbnails exist for the given name */ public String getNameOfThumbnailForImage(final String imageFilename) throws ComponentNotFoundException, MultipleComponentsFoundException { return componentFinder("thumbnail", imageFilename).getName(); } }<|fim▁end|>
<|file_name|>cmdcache.py<|end_file_name|><|fim▁begin|># Built-in imports import os, re, inspect, keyword # Maya imports import maya.cmds as cmds import maya.mel as mm # PyMEL imports import pymel.util as util import pymel.versions as versions # Module imports from . import plogging from . import startup _logger = plogging.getLogger(__name__) moduleNameShortToLong = { 'modeling' : 'Modeling', 'rendering' : 'Rendering', 'effects' : 'Effects', 'animation' : 'Animation', 'windows' : 'Windows', 'system' : 'System', 'general' : 'General', 'language' : 'Language' } #: these are commands which need to be manually added to the list parsed from the docs moduleCommandAdditions = { 'windows' : ['connectControl', 'deleteUI','uiTemplate','setUITemplate','renameUI','setParent','objectTypeUI','lsUI', 'disable', 'dimWhen'], 'general' : ['encodeString', 'format', 'assignCommand', 'commandEcho', 'condition', 'evalDeferred', 'isTrue', 'itemFilter', 'itemFilterAttr', 'itemFilterRender', 'itemFilterType', 'pause', 'refresh', 'stringArrayIntersector', 'selectionConnection'] } #: secondary flags can only be used in conjunction with other flags so we must exclude them when creating classes from commands. #: because the maya docs do not specify in any parsable way which flags are secondary modifiers, we must maintain this dictionary. #: once this list is reliable enough and includes default values, we can use them as keyword arguments in the class methods that they modify. secondaryFlags = { 'xform' : ( ( 'absolute', None,[] ), ( 'relative', None,[] ), ( 'euler', None,['relative'] ), ( 'objectSpace', True, ['scalePivot', 'rotatePivot', 'rotateAxis', 'rotation', 'rotateTranslation', 'translation', 'matrix', 'boundingBox', 'boundingBoxInvisible', 'pivots'] ), ( 'worldSpace', False, ['scalePivot', 'rotatePivot', 'rotateAxis', 'rotation', 'rotateTranslation', 'translation', 'matrix', 'boundingBox', 'boundingBoxInvisible', 'pivots'] ), ( 'preserve', None,['scalePivot', 'rotatePivot', 'rotateOrder', 'rotateAxis', 'centerPivots'] ), ( 'worldSpaceDistance', None,['scalePivot', 'rotatePivot', 'scaleTranslation', 'rotateTranslation', 'translation', 'pivots'] ) ), 'file' : ( ( 'loadAllDeferred', False, ['open'] ), ( 'loadNoReferences', False, ['open', 'i', 'reference', 'loadReference'] ), ( 'loadReferenceDepth', None, ['open', 'i', 'reference', 'loadReference'] ), ( 'force', False, ['open', 'newFile', 'save', 'exportAll', 'exportSelected', 'exportAnim', 'exportSelectedAnim', 'exportAnimFromReference', 'exportSelectedAnimFromReference' ] ), ( 'constructionHistory', True, ['exportSelected'] ), ( 'channels', True, ['exportSelected'] ), ( 'constraints', True, ['exportSelected'] ), ( 'expressions', True, ['exportSelected'] ), ( 'shader', True, ['exportSelected'] ), ( 'defaultNamespace', False, ['reference', 'i'] ), ( 'deferReference', False, ['reference', 'i'] ), ( 'editCommand', None, ['cleanReference'] ), ( 'groupReference', False, ['reference', 'i'] ), ( 'groupLocator', None,['reference'] ), ( 'groupName', None,['reference', 'i'] ), ( 'namespace', None,['reference', 'exportAsReference', 'namespace'] ), ( 'referenceNode', None,['reference', 'exportAnimFromReference', 'exportSelectedAnimFromReference'] ), ( 'renameAll', None,['i'] ), ( 'renamingPrefix', None,['reference', 'i','exportAsReference'] ), #( 'saveTextures', "unlessRef", ['saveAs']), ( 'swapNamespace', None, ['reference', 'i'] ), ( 'sharedReferenceFile', None, ['reference'] ), ( 'sharedNodes', None, ['reference'] ), ( 'returnNewNodes', False, ['open', 'reference', 'i', 
'loadReference' ] ), #( 'loadSettings', ), ( 'preserveReferences', False, ['i', 'exportAll', 'exportSelected'] ), ( 'preSaveScript', None, ['save'] ), ( 'postSaveScript', None, ['save'] ), ( 'type', None, ['open', 'newFile', 'save', 'exportAll', 'exportSelected', 'exportAnim', 'exportSelectedAnim', 'exportAnimFromReference', 'exportSelectedAnimFromReference' ] ), ), 'joint' : ( ( 'absolute', True, ['position'] ), ( 'relative', True, ['position'] ) ) }<|fim▁hole|>UI_COMMANDS ="""attrColorSliderGrp attrControlGrp attrEnumOptionMenu attrEnumOptionMenuGrp attrFieldGrp attrFieldSliderGrp attrNavigationControlGrp attributeMenu colorIndexSliderGrp colorSliderButtonGrp colorSliderGrp columnLayout colorEditor floatField floatFieldGrp floatScrollBar floatSlider floatSlider2 floatSliderButtonGrp floatSliderGrp frameLayout iconTextButton iconTextCheckBox iconTextRadioButton iconTextRadioCollection iconTextScrollList iconTextStaticLabel intField intFieldGrp intScrollBar intSlider intSliderGrp paneLayout panel radioButton radioButtonGrp radioCollection radioMenuItemCollection symbolButton symbolCheckBox textCurves textField textFieldButtonGrp textFieldGrp text textScrollList toolButton toolCollection window blendShapeEditor blendShapePanel button checkBox checkBoxGrp confirmDialog fontDialog formLayout menu menuBarLayout menuEditor menuItem menuSet promptDialog scrollField scrollLayout scriptedPanel scriptedPanelType shelfButton shelfLayout shelfTabLayout tabLayout outlinerEditor optionMenu outlinerPanel optionMenuGrp animCurveEditor animDisplay separator visor layout layoutDialog layerButton hyperGraph hyperPanel hyperShade rowColumnLayout rowLayout renderLayerButton renderWindowEditor glRenderEditor scriptTable keyframeStats keyframeOutliner canvas channelBox gradientControl gradientControlNoAttr gridLayout messageLine popupMenu modelEditor modelPanel helpLine hardwareRenderPanel image nodeIconButton commandLine progressBar defaultLightListCheckBox exclusiveLightCheckBox shellField clipSchedulerOutliner clipEditor deviceEditor devicePanel dynRelEdPanel dynRelEditor dynPaintEditor nameField cmdScrollFieldExecuter cmdScrollFieldReporter cmdShell nameField palettePort """.split() #: creation commands whose names do not match the type of node they return require this dict #: to resolve which command the class should wrap nodeTypeToNodeCommand = { #'failed' : 'clip', #'failed' : 'clipSchedule', 'airField' : 'air', 'dragField' : 'drag', 'emitter' : 'emitter', 'turbulenceField' : 'turbulence', #'failed' : 'effector', 'volumeAxisField' : 'volumeAxis', 'uniformField' : 'uniform', 'gravityField' : 'gravity', #'failed' : 'event', #'failed' : 'pointCurveConstraint', #'failed' : 'deformer', #'failed' : 'constrain', 'locator' : 'spaceLocator', 'vortexField' : 'vortex', 'makeNurbTorus' : 'torus', 'makeNurbCone' : 'cone', 'makeNurbCylinder' : 'cylinder', 'nurbsCurve' : 'curve', # returns a single transform, but creates a nurbsCurve 'makeNurbSphere' : 'sphere', 'makeNurbCircle' : 'circle', 'makeNurbPlane' : 'nurbsPlane', 'makeNurbsSquare' : 'nurbsSquare', 'makeNurbCube' : 'nurbsCube', 'skinPercent' : 'skinCluster', 'file' : None, # prevent File node from using cmds.file 'nurbsSurface' : 'surface', 'annotationShape' : 'annotate', 'condition' : None, # prevent Condition node from using cmds.condition (which is for script conditions) } cmdlistOverrides = {} #util.setCascadingDictItem( cmdlistOverrides, ( 'optionMenu', 'shortFlags', 'sl', 'modes' ), ['create', 'query', 'edit'] ) util.setCascadingDictItem( 
cmdlistOverrides, ( 'optionMenu', 'flags', 'select', 'modes' ), ['create', 'query', 'edit'] )
util.setCascadingDictItem( cmdlistOverrides, ( 'ikHandle', 'flags', 'jointList', 'modes' ), ['query'] )
#util.setCascadingDictItem( cmdlistOverrides, ( 'ikHandle', 'shortFlags', 'jl', 'modes' ), ['query'] )
util.setCascadingDictItem( cmdlistOverrides, ( 'keyframe', 'flags', 'index', 'args' ), 'timeRange' ) # make sure this is a time range so it gets proper slice syntax

# Need to override this, rather than having it detected from testNodeCmd, because
# it crashes testNodeCmd
util.setCascadingDictItem( cmdlistOverrides, ( 'pointOnPolyConstraint', 'resultNeedsUnpacking', ), True )

def getCmdInfoBasic( command ):
    typemap = {
        'string' : unicode,
        'length' : float,
        'float' : float,
        'angle' : float,
        'int' : int,
        'unsignedint' : int,
        'on|off' : bool,
        'script' : callable,
        'name' : 'PyNode'
    }
    flags = {}
    shortFlags = {}
    removedFlags = {}
    try:
        lines = cmds.help( command ).split('\n')
    except RuntimeError:
        pass
    else:
        synopsis = lines.pop(0)
        # certain commands on certain platforms have an empty first line
        if not synopsis:
            synopsis = lines.pop(0)
        #_logger.debug(synopsis)
        if lines:
            lines.pop(0) # 'Flags'
            #_logger.debug(lines)
            for line in lines:
                line = line.replace( '(Query Arg Mandatory)', '' )
                line = line.replace( '(Query Arg Optional)', '' )
                tokens = line.split()
                try:
                    tokens.remove('(multi-use)')
                    multiuse = True
                except ValueError:
                    multiuse = False
                #_logger.debug(tokens)
                if len(tokens) > 1 and tokens[0].startswith('-'):
                    args = [ typemap.get(x.lower(), util.uncapitalize(x) ) for x in tokens[2:] ]
                    numArgs = len(args)

                    # flags with no args in mel require a boolean val in python
                    if numArgs == 0:
                        args = bool
                        # numArgs will stay at 0, which is the number of mel arguments.
                        # this flag should be renamed to numMelArgs
                        #numArgs = 1
                    elif numArgs == 1:
                        args = args[0]

                    longname = str(tokens[1][1:])
                    shortname = str(tokens[0][1:])

                    if longname in keyword.kwlist:
                        removedFlags[ longname ] = shortname
                        longname = shortname
                    elif shortname in keyword.kwlist:
                        removedFlags[ shortname ] = longname
                        shortname = longname
                    #sometimes the longname is empty, so we'll use the shortname for both
                    elif longname == '':
                        longname = shortname
                    flags[longname] = { 'longname' : longname,
                                        'shortname' : shortname,
                                        'args' : args,
                                        'numArgs' : numArgs,
                                        'docstring' : '' }
                    if multiuse:
                        flags[longname].setdefault('modes', []).append('multiuse')
                    shortFlags[shortname] = longname

    #except:
    #    pass
    #_logger.debug("could not retrieve command info for", command)
    res = { 'flags': flags, 'shortFlags': shortFlags, 'description' : '', 'example': '', 'type' : 'other' }
    if removedFlags:
        res['removedFlags'] = removedFlags
    return res

def getCmdInfo( command, version, python=True ):
    """Since many maya Python commands are builtins we can't use getargspec on them.
besides, most use keyword args whose precise meaning we need (whether they can be used with edit or query flags, the shortnames of flags, etc.), so we have to parse the maya docs""" from parsers import CommandDocParser, mayaDocsLocation basicInfo = getCmdInfoBasic(command) try: docloc = mayaDocsLocation(version) if python: docloc = os.path.join( docloc , 'CommandsPython/%s.html' % (command) ) else: docloc = os.path.join( docloc , 'Commands/%s.html' % (command) ) f = open( docloc ) parser = CommandDocParser(command) parser.feed( f.read() ) f.close() example = parser.example example = example.rstrip() if python: pass # start with basic info, gathered using mel help command, then update with info parsed from docs # we copy because we need access to the original basic info below basicFlags = basicInfo.get('flags', {}) flags = basicInfo['flags'].copy() flags.update( parser.flags ) # if we have a "true" mel boolean flag, then getCmdInfoBasic will return # numArgs == 0, but parsing the PYTHON docs will return a numArgs of 1; # keep the numArgs of 0 for flag, flagInfo in parser.flags.iteritems(): if flagInfo.get('args') == bool and flagInfo.get('numArgs') == 1: basicFlagInfo = basicFlags.get(flag, {}) if (basicFlagInfo.get('args') == bool and basicFlagInfo.get('numArgs') == 0): flagInfo['numArgs'] = 0 if command in secondaryFlags: for secondaryFlag, defaultValue, modifiedList in secondaryFlags[command]: #_logger.debug(command, "2nd", secondaryFlag) flags[secondaryFlag]['modified'] = modifiedList #_logger.debug(sorted(modifiedList)) #_logger.debug(sorted(parser.flags.keys())) for primaryFlag in modifiedList: #_logger.debug(command, "1st", primaryFlag) if 'secondaryFlags' in parser.flags[primaryFlag]: flags[primaryFlag]['secondaryFlags'].append(secondaryFlag) else: flags[primaryFlag]['secondaryFlags'] = [secondaryFlag] # add shortname lookup #_logger.debug((command, sorted( basicInfo['flags'].keys() ))) #_logger.debug((command, sorted( flags.keys() ))) # args and numArgs are more reliable from the mel help command than from parsed docs, # so, here we put that back in place and create shortflags. # also use original 'multiuse' info... for flag, flagData in flags.items(): basicFlagData = basicFlags.get(flag) if basicFlagData: if 'args' in basicFlagData and 'numArgs' in basicFlagData: flagData['args'] = basicFlagData['args'] flagData['numArgs'] = basicFlagData['numArgs'] if ( 'multiuse' in basicFlagData.get('modes', []) and 'multiuse' not in flagData.get('modes', [])): flagData.setdefault('modes', []).append('multiuse') shortFlags = basicInfo['shortFlags'] res = { 'flags': flags, 'shortFlags': shortFlags, 'description' : parser.description, 'example': example } try: res['removedFlags'] = basicInfo['removedFlags'] except KeyError: pass return res except IOError: _logger.debug("could not find docs for %s" % command) return basicInfo #raise IOError, "cannot find maya documentation directory" def fixCodeExamples(style='maya', force=False): """cycle through all examples from the maya docs, replacing maya.cmds with pymel and inserting pymel output.
NOTE: this can only be run from gui mode WARNING: back up your preferences before running TODO: auto backup and restore of maya prefs """ manipOptions = cmds.manipOptions( q=1, handleSize=1, scale=1 ) animOptions = [] animOptions.append( cmds.animDisplay( q=1, timeCode=True ) ) animOptions.append( cmds.animDisplay( q=1, timeCodeOffset=True ) ) animOptions.append( cmds.animDisplay( q=1, modelUpdate=True ) ) openWindows = cmds.lsUI(windows=True) examples = CmdExamplesCache().read() processedExamples = CmdProcessedExamplesCache().read() processedExamples = {} if processedExamples is None else processedExamples allCmds = set(examples.keys()) # put commands that require manual interaction first manualCmds = ['fileBrowserDialog', 'fileDialog', 'fileDialog2', 'fontDialog'] skipCmds = ['colorEditor', 'emit', 'finder', 'doBlur', 'messageLine', 'renderWindowEditor', 'ogsRender', 'webBrowser', 'deleteAttrPattern', 'grabColor'] allCmds.difference_update(manualCmds) sortedCmds = manualCmds + sorted(allCmds) for command in sortedCmds: example = examples[command] if not force and command in processedExamples: _logger.info("%s: already completed. skipping." % command) continue _logger.info("Starting command %s", command) if style == 'doctest' : DOC_TEST_SKIP = ' #doctest: +SKIP' else: DOC_TEST_SKIP = '' # change from cmds to pymel reg = re.compile(r'\bcmds\.') example = example.replace('import maya.cmds as cmds', 'import pymel.core as pm' + DOC_TEST_SKIP, 1) example = reg.sub( 'pm.', example ) #example = example.replace( 'import maya.cmds as cmds', 'import pymel as pm\npm.newFile(f=1) #fresh scene' ) lines = example.split('\n') if len(lines)==1: _logger.info("removing empty example for command %s", command) examples.pop(command) processedExamples[command] = '' # write out after each success so that if we crash we don't have to start from scratch CmdProcessedExamplesCache().write(processedExamples) continue if command in skipCmds: example = '\n'.join( lines ) processedExamples[command] = example # write out after each success so that if we crash we don't have to start from scratch CmdProcessedExamplesCache().write(processedExamples) #lines.insert(1, 'pm.newFile(f=1) #fresh scene') # create a fresh scene. 
this does not need to be in the docstring unless we plan on using it in doctests, which is probably unrealistic cmds.file(new=1,f=1) newlines = [] statement = [] # narrowed down the commands that cause maya to crash to these prefixes if re.match( '(dis)|(dyn)|(poly)', command) : evaluate = False elif command in skipCmds: evaluate = False else: evaluate = True # gives a little leniency for where spaces are placed in the result line resultReg = re.compile('# Result:\s*(.*) #$') try: # funky things can happen when executing maya code: some exceptions somehow occur outside the eval/exec for i, line in enumerate(lines): res = None # replace with pymel results '# Result: 1 #' m = resultReg.match(line) if m: if evaluate is False: line = m.group(1) newlines.append(' ' + line) else: if evaluate: if line.strip().endswith(':') or line.startswith(' ') or line.startswith('\t'): statement.append(line) else: # evaluate the compiled statement using exec, which can do multi-line if statements and so on if statement: try: #_logger.debug("executing %s", statement) exec( '\n'.join(statement) ) # reset statement statement = [] except Exception, e: _logger.info("stopping evaluation %s", str(e))# of %s on line %r" % (command, line) evaluate = False try: _logger.debug("evaluating: %r" % line) res = eval( line ) #if res is not None: _logger.info("result", repr(repr(res))) #else: _logger.info("no result") except: #_logger.debug("failed evaluating:", str(e)) try: exec( line ) except (Exception, TypeError), e: _logger.info("stopping evaluation %s", str(e))# of %s on line %r" % (command, line) evaluate = False if style == 'doctest': if line.startswith(' ') or line.startswith('\t'): newlines.append(' ... ' + line ) else: newlines.append(' >>> ' + line + DOC_TEST_SKIP ) if res is not None: newlines.append( ' ' + repr(res) ) else: newlines.append(' ' + line ) if res is not None: newlines.append( ' # Result: %r #' % (res,) ) if evaluate: _logger.info("successful evaluation! 
%s", command) example = '\n'.join( newlines ) processedExamples[command] = example except Exception, e: raise #_logger.info("FAILED: %s: %s" % (command, e) ) else: # write out after each success so that if we crash we don't have to start from scratch CmdProcessedExamplesCache().write(processedExamples) # cleanup opened windows for ui in set(cmds.lsUI(windows=True)).difference(openWindows): try: cmds.deleteUI(ui, window=True) except:pass _logger.info("Done Fixing Examples") # restore manipulators and anim options print manipOptions cmds.manipOptions( handleSize=manipOptions[0], scale=manipOptions[1] ) print animOptions cmds.animDisplay( e=1, timeCode=animOptions[0], timeCodeOffset=animOptions[1], modelUpdate=animOptions[2]) #CmdExamplesCache(examples) def getModuleCommandList( category, version=None ): from parsers import CommandModuleDocParser parser = CommandModuleDocParser(category, version) return parser.parse() def getCallbackFlags(cmdInfo): """uses parsed data and naming conventions to determine which flags are callbacks""" commandFlags = [] try: flagDocs = cmdInfo['flags'] except KeyError: pass else: for flag, data in flagDocs.items(): if data['args'] in ['script', callable] or 'command' in flag.lower(): commandFlags += [flag, data['shortname']] return commandFlags def getModule(funcName, knownModuleCmds): # determine to which module this function belongs module = None if funcName in ['eval', 'file', 'filter', 'help', 'quit']: module = None elif funcName.startswith('ctx') or funcName.endswith('Ctx') or funcName.endswith('Context'): module = 'context' #elif funcName in self.uiClassList: # module = 'uiClass' #elif funcName in nodeHierarchyTree or funcName in nodeTypeToNodeCommand.values(): # module = 'node' else: for moduleName, commands in knownModuleCmds.iteritems(): if funcName in commands: module = moduleName break if module is None: if mm.eval('whatIs "%s"' % funcName ) == 'Run Time Command': module = 'runtime' else: module = 'other' return module #----------------------------------------------- # Command Help Documentation #----------------------------------------------- _cmdArgMakers = {} def cmdArgMakers(force=False): global _cmdArgMakers if _cmdArgMakers and not force: return _cmdArgMakers def makeCircle(): return cmds.circle()[0] def makeEp(): return makeCircle() + '.ep[1]' def makeSphere(): return cmds.polySphere()[0] def makeCube(): return cmds.polyCube()[0] def makeIk(): j1 = cmds.joint() j2 = cmds.joint() return cmds.ikHandle(j1, j2, solver='ikRPsolver')[0] def makeJoint(): return cmds.joint() def makeSkin(): j1 = cmds.joint() j2 = cmds.joint() sphere = makeSphere() return cmds.skinCluster(j1, j2, sphere)[0] _cmdArgMakers = \ { 'tangentConstraint' : ( makeCircle, makeCube ), 'poleVectorConstraint': ( makeSphere, makeIk ), 'pointCurveConstraint': ( makeEp, ), 'skinCluster' : ( makeJoint, makeJoint, makeSphere ), } constraintCmds = [x for x in dir(cmds) if x.endswith('onstraint') and not cmds.runTimeCommand(x, q=1, exists=1) and x != 'polySelectConstraint'] for constrCmd in constraintCmds: if constrCmd not in _cmdArgMakers: _cmdArgMakers[constrCmd] = ( makeSphere, makeCube ) return _cmdArgMakers def nodeCreationCmd(func, nodeType): argMakers = cmdArgMakers() # compile the args list for node creation createArgs = argMakers.get(nodeType, []) if createArgs: createArgs = [argMaker() for argMaker in createArgs] # run the function return func(*createArgs) def testNodeCmd( funcName, cmdInfo, nodeCmd=False, verbose=False ): _logger.info(funcName.center( 50, '=')) if funcName in [
'character', 'lattice', 'boneLattice', 'sculpt', 'wire' ]: _logger.debug("skipping") return cmdInfo # These cause crashes... confirmed that pointOnPolyConstraint still # crashes in 2012 dangerousCmds = ['doBlur', 'pointOnPolyConstraint'] if funcName in dangerousCmds: _logger.debug("skipping 'dangerous command'") return cmdInfo def _formatCmd( cmd, args, kwargs ): args = [ x.__repr__() for x in args ] kwargs = [ '%s=%s' % (key, val.__repr__()) for key, val in kwargs.items() ] return '%s( %s )' % ( cmd, ', '.join( args+kwargs ) ) def _objectToType( result ): "convert an instance or list of instances to a python type or list of types" if isinstance(result, list): return [ type(x) for x in result ] else: return type(result) _castList = [float, int, bool] # def _listIsCastable(resultType): # "ensure that all elements are the same type and that the types are castable" # try: # typ = resultType[0] # return typ in _castList and all([ x == typ for x in resultType ]) # except IndexError: # return False module = cmds try: func = getattr(module, funcName) except AttributeError: _logger.warning("could not find function %s in modules %s" % (funcName, module.__name__)) return cmdInfo # get the current list of objects in the scene so we can cleanup later, after we make nodes allObjsBegin = set( cmds.ls(l=1) ) try: # Attempt to create the node cmds.select(cl=1) # the arglist passed from creation to general testing args = [] constrObj = None if nodeCmd: #------------------ # CREATION #------------------ obj = nodeCreationCmd(func, funcName) if isinstance(obj, list): _logger.debug("Return %s", obj) if len(obj) == 1: _logger.info("%s: creation return values need unpacking" % funcName) cmdInfo['resultNeedsUnpacking'] = True elif not obj: raise ValueError, "returned object is an empty list" objTransform = obj[0] obj = obj[-1] if obj is None: #emptyFunctions.append( funcName ) raise ValueError, "Returned object is None" elif not cmds.objExists( obj ): raise ValueError, "Returned object %s is Invalid" % obj args = [obj] except (TypeError,RuntimeError, ValueError), msg: _logger.debug("failed creation: %s", msg) else: objType = cmds.objectType(obj) #------------------ # TESTING #------------------ #(func, args, data) = cmdList[funcName] #(usePyNode, baseClsName, nodeName) flags = cmdInfo['flags'] hasQueryFlag = flags.has_key( 'query' ) hasEditFlag = flags.has_key( 'edit' ) anyNumRe = re.compile('\d+') for flag in sorted(flags.keys()): flagInfo = flags[flag] if flag in ['query', 'edit']: continue assert flag != 'ype', "%s has bad flag" % funcName # special case for constraints if constrObj and flag in ['weight']: flagargs = [constrObj] + args else: flagargs = args try: modes = flagInfo['modes'] testModes = False except KeyError, msg: #raise KeyError, '%s: %s' % (flag, msg) #_logger.debug(flag, "Testing modes") flagInfo['modes'] = [] modes = [] testModes = True # QUERY val = None argtype = flagInfo['args'] if 'query' in modes or testModes == True: if hasQueryFlag: kwargs = {'query':True, flag:True} else: kwargs = { flag:True } cmd = _formatCmd(funcName, flagargs, kwargs) try: _logger.debug(cmd) val = func( *flagargs, **kwargs ) #_logger.debug(val) resultType = _objectToType(val) # ensure symmetry between edit and query commands: # if this flag is queryable and editable, then its queried value should be symmetric to its edit arguments if 'edit' in modes and argtype != resultType: # there are certain patterns of asymmetry which we can safely correct: singleItemList = (isinstance( resultType, list) and len(resultType)
==1 and 'multiuse' not in flagInfo.get('modes', [])) # [bool] --> bool if singleItemList and resultType[0] == argtype: _logger.info("%s, %s: query flag return values need unpacking" % (funcName, flag)) flagInfo['resultNeedsUnpacking'] = True val = val[0] # [int] --> bool elif singleItemList and argtype in _castList and resultType[0] in _castList: _logger.info("%s, %s: query flag return values need unpacking and casting" % (funcName, flag)) flagInfo['resultNeedsUnpacking'] = True flagInfo['resultNeedsCasting'] = True val = argtype(val[0]) # int --> bool elif argtype in _castList and resultType in _castList: _logger.info("%s, %s: query flag return values need casting" % (funcName, flag)) flagInfo['resultNeedsCasting'] = True val = argtype(val) else: # no valid corrections found _logger.info(cmd) _logger.info("\treturn mismatch") _logger.info('\tresult: %s', val.__repr__()) _logger.info('\tpredicted type: %s', argtype) _logger.info('\tactual type: %s', resultType) # value is no good. reset to None, so that a default will be generated for edit val = None else: _logger.debug("\tsucceeded") _logger.debug('\tresult: %s', val.__repr__()) _logger.debug('\tresult type: %s', resultType) except TypeError, msg: # flag is no longer supported if str(msg).startswith( 'Invalid flag' ): #if verbose: _logger.info("removing flag %s %s %s", funcName, flag, msg) shortname = flagInfo['shortname'] flags.pop(flag,None) flags.pop(shortname,None) modes = [] # stop edit from running else: _logger.info(cmd) _logger.info("\t" + str(msg).rstrip('\n')) val = None except RuntimeError, msg: _logger.info(cmd) _logger.info("\tRuntimeError: " + str(msg).rstrip('\n') ) val = None except ValueError, msg: _logger.info(cmd) _logger.info("\tValueError: " + str(msg).rstrip('\n') ) val = None else: # some flags are only in mel help and not in maya docs, so we don't know their # supported per-flag modes. we fill that in here if 'query' not in flagInfo['modes']: flagInfo['modes'].append('query') # EDIT if 'edit' in modes or testModes == True: #_logger.debug("Args:", argtype) try: # we use the value returned from query above as defaults for putting back in as edit args # but if the return was empty we need to produce something to test on. # NOTE: this is just a guess if val is None: if isinstance(argtype, list): val = [] for typ in argtype: if typ == unicode or isinstance(typ,basestring): val.append('persp') else: if 'query' in modes: val.append( typ(0) ) # edit only, ensure that bool args are True else: val.append( typ(1) ) else: if argtype == unicode or isinstance(argtype,basestring): val = 'persp' elif 'query' in modes: val = argtype(0) else: # edit only, ensure that bool args are True val = argtype(1) kwargs = {'edit':True, flag:val} cmd = _formatCmd(funcName, args, kwargs) _logger.debug(cmd) # some commands will either delete or rename a node, ie: # spaceLocator(e=1, name=...) # container(e=1, removeContainer=True ) # ...which will then make subsequent cmds fail. # To get around this, we need to undo the cmd. try: cmds.undoInfo(openChunk=True) editResult = func( *args, **kwargs ) finally: cmds.undoInfo(closeChunk=True) if not cmds.objExists(obj): # cmds.camera(e=1, name=...) does weird stuff - it # actually renames the parent transform, even if you give # the name of the shape... which means the shape # then gets a second 'Shape1' tacked at the end... # ...and in addition, undo is broken as well. # So we need a special case for this, where we rename...
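# Illustrative note (hypothetical object name): for obj 'cameraShape1', anyNumRe turns it into the renamePattern 'cameraShape*', so the ls() call below can locate whatever the rename produced and rename it back.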
if objType == 'camera' and flag == 'name': _logger.info('\t(Undoing camera rename)') renamePattern = anyNumRe.sub('*', obj) possibleRenames = cmds.ls(renamePattern, type=objType) possibleRenames = [x for x in possibleRenames if x not in allObjsBegin] # newName might not be the exact same as our original, # but as long as it's the same maya type, and isn't # one of the originals, it shouldn't matter... newName = possibleRenames[-1] cmds.rename(newName, obj) else: _logger.info('\t(Undoing cmd)') cmds.undo() _logger.debug("\tsucceeded") #_logger.debug('\t%s', editResult.__repr__()) #_logger.debug('\t%s %s', argtype, type(editResult)) #_logger.debug("SKIPPING %s: need arg of type %s" % (flag, flagInfo['argtype'])) except TypeError, msg: if str(msg).startswith( 'Invalid flag' ): #if verbose: # flag is no longer supported _logger.info("removing flag %s %s %s", funcName, flag, msg) shortname = flagInfo['shortname'] flags.pop(flag,None) flags.pop(shortname,None) else: _logger.info(funcName) _logger.info("\t" + str(msg).rstrip('\n')) _logger.info("\tpredicted arg: %s", argtype) if 'query' not in modes: _logger.info("\tedit only") except RuntimeError, msg: _logger.info(cmd) _logger.info("\t" + str(msg).rstrip('\n')) _logger.info("\tpredicted arg: %s", argtype) if 'query' not in modes: _logger.info("\tedit only") except ValueError, msg: _logger.info(cmd) _logger.info("\tValueError: " + str(msg).rstrip('\n') ) val = None else: if 'edit' not in flagInfo['modes']: flagInfo['modes'].append('edit') # cleanup allObjsEnd = set( cmds.ls(l=1) ) newObjs = list(allObjsEnd.difference( allObjsBegin ) ) if newObjs: cmds.delete( newObjs ) return cmdInfo def _getNodeHierarchy( version=None ): """ get node hierarchy as a list of 3-value tuples: ( nodeType, parents, children ) """ import pymel.util.trees as trees import pymel.internal.apicache as apicache if versions.current() >= versions.v2012: # We now have nodeType(isTypeName)! yay! inheritances = {} for nodeType in apicache._getAllMayaTypes(): try: inheritances[nodeType] = apicache.getInheritance(nodeType) except apicache.ManipNodeTypeError: continue parentTree = {} # Convert inheritance lists to a node=>parent dict for nodeType, inheritance in inheritances.iteritems(): for i in xrange(len(inheritance)): child = inheritance[i] if i == 0: if child == 'dependNode': continue else: parent = 'dependNode' else: parent = inheritance[i - 1] if child in parentTree: assert parentTree[child] == parent, "conflicting parents: node type '%s' previously determined parent was '%s'.
now '%s'" % (child, parentTree[child], parent) else: parentTree[child] = parent nodeHierarchyTree = trees.treeFromDict(parentTree) else: from .parsers import NodeHierarchyDocParser parser = NodeHierarchyDocParser(version) nodeHierarchyTree = trees.IndexedTree(parser.parse()) return [ (x.value, tuple(y.value for y in x.parents()), tuple(y.value for y in x.childs()) ) \ for x in nodeHierarchyTree.preorder() ] class CmdExamplesCache(startup.PymelCache): NAME = 'mayaCmdsExamples' DESC = 'the list of Maya command examples' USE_VERSION = True class CmdProcessedExamplesCache(CmdExamplesCache): USE_VERSION = False class CmdDocsCache(startup.PymelCache): NAME = 'mayaCmdsDocs' DESC = 'the Maya command documentation' class CmdCache(startup.SubItemCache): NAME = 'mayaCmdsList' DESC = 'the list of Maya commands' _CACHE_NAMES = '''cmdlist nodeHierarchy uiClassList nodeCommandList moduleCmds'''.split() CACHE_TYPES = {'nodeHierarchy':list, 'uiClassList':list, 'nodeCommandList':list, } def rebuild(self) : """Build and save to disk the list of Maya Python commands and their arguments WARNING: will unload existing plugins, then (re)load all maya-installed plugins, without making an attempt to return the loaded plugins to the state they were at before this command is run. Also, the act of loading all the plugins may crash maya, especially if done from a non-GUI session """ # Put in a debug, because this can be crashy _logger.debug("Starting CmdCache.rebuild...") # With extension can't get docs on unix 64 # path is # /usr/autodesk/maya2008-x64/docs/Maya2008/en_US/Nodes/index_hierarchy.html # and not # /usr/autodesk/maya2008-x64/docs/Maya2008-x64/en_US/Nodes/index_hierarchy.html long_version = versions.installName() from parsers import mayaDocsLocation cmddocs = os.path.join(mayaDocsLocation(long_version), 'CommandsPython') assert os.path.exists(cmddocs), "Command documentation does not exist: %s" % cmddocs _logger.info("Rebuilding the maya node hierarchy...") # Load all plugins to get the nodeHierarchy / nodeFunctions import pymel.api.plugins as plugins # We don't want to add in plugin nodes / commands - let that be done # by the plugin callbacks. However, unloading mechanism is not 100% # ... sometimes functions get left in maya.cmds... and then trying # to use those left-behind functions can cause crashes (ie, # FBXExportQuaternion). 
So check which methods SHOULD be unloaded # first, so we know to skip those if we come across them even after # unloading the plugin pluginCommands = set() loadedPlugins = cmds.pluginInfo(q=True, listPlugins=True) if loadedPlugins: for plug in loadedPlugins: plugCmds = plugins.pluginCommands(plug) if plugCmds: pluginCommands.update(plugCmds) plugins.unloadAllPlugins() self.nodeHierarchy = _getNodeHierarchy(long_version) nodeFunctions = [ x[0] for x in self.nodeHierarchy ] nodeFunctions += nodeTypeToNodeCommand.values() _logger.info("Rebuilding the list of Maya commands...") #nodeHierarchyTree = trees.IndexedTree(self.nodeHierarchy) self.uiClassList = UI_COMMANDS self.nodeCommandList = [] tmpModuleCmds = {} for moduleName, longname in moduleNameShortToLong.items(): tmpModuleCmds[moduleName] = getModuleCommandList( longname, long_version ) tmpCmdlist = inspect.getmembers(cmds, callable) #self.moduleCmds = defaultdict(list) self.moduleCmds = dict( (k,[]) for k in moduleNameShortToLong.keys() ) self.moduleCmds.update( {'other':[], 'runtime': [], 'context': [], 'uiClass': [] } ) def addCommand(funcName): _logger.debug('adding command: %s' % funcName) module = getModule(funcName, tmpModuleCmds) cmdInfo = {} if module: self.moduleCmds[module].append(funcName) if module != 'runtime': cmdInfo = getCmdInfo(funcName, long_version) if module != 'windows': if funcName in nodeFunctions: self.nodeCommandList.append(funcName) cmdInfo = testNodeCmd( funcName, cmdInfo, nodeCmd=True, verbose=True ) #elif module != 'context': # cmdInfo = testNodeCmd( funcName, cmdInfo, nodeCmd=False, verbose=True ) cmdInfo['type'] = module flags = getCallbackFlags(cmdInfo) if flags: cmdInfo['callbackFlags'] = flags self.cmdlist[funcName] = cmdInfo # # func, args, (usePyNode, baseClsName, nodeName) # # args = dictionary of command flags and their data # # usePyNode = determines whether the class returns its 'nodeName' or uses PyNode to dynamically return # # baseClsName = for commands which should generate a class, this is the name of the superclass to inherit # # nodeName = most creation commands return a node of the same name, this option is provided for the exceptions # try: # self.cmdlist[funcName] = args, pymelCmdsList[funcName] ) # except KeyError: # # context commands generate a class based on unicode (which is triggered by passing 'None' to baseClsName) # if funcName.startswith('ctx') or funcName.endswith('Ctx') or funcName.endswith('Context'): # self.cmdlist[funcName] = (funcName, args, (False, None, None) ) # else: # self.cmdlist[funcName] = (funcName, args, () ) for funcName, _ in tmpCmdlist : if funcName in pluginCommands: _logger.debug("command %s was a plugin command that should have been unloaded - skipping" % funcName) continue addCommand(funcName) # split the cached data for lazy loading cmdDocList = {} examples = {} for cmdName, cmdInfo in self.cmdlist.iteritems(): try: examples[cmdName] = cmdInfo.pop('example') except KeyError: pass newCmdInfo = {} if 'description' in cmdInfo: newCmdInfo['description'] = cmdInfo.pop('description') newFlagInfo = {} if 'flags' in cmdInfo: for flag, flagInfo in cmdInfo['flags'].iteritems(): newFlagInfo[flag] = { 'docstring' : flagInfo.pop('docstring') } newCmdInfo['flags'] = newFlagInfo if newCmdInfo: cmdDocList[cmdName] = newCmdInfo CmdDocsCache().write(cmdDocList) CmdExamplesCache().write(examples) def build(self): super(CmdCache, self).build() # corrections that are always made, to both loaded and freshly built caches util.mergeCascadingDicts( cmdlistOverrides, self.cmdlist 
) # add in any nodeCommands added after cache rebuild self.nodeCommandList = set(self.nodeCommandList).union(nodeTypeToNodeCommand.values()) self.nodeCommandList = sorted( self.nodeCommandList ) for module, funcNames in moduleCommandAdditions.iteritems(): for funcName in funcNames: currModule = self.cmdlist[funcName]['type'] if currModule != module: self.cmdlist[funcName]['type'] = module idx = self.moduleCmds[currModule].index(funcName) self.moduleCmds[currModule].pop(idx) self.moduleCmds[module].append(funcName) return (self.cmdlist,self.nodeHierarchy,self.uiClassList,self.nodeCommandList,self.moduleCmds)<|fim▁end|>
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from rest_framework import serializers from .models import User, Activity, Period class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ('url', 'username', 'email') extra_kwargs = { 'url': {'view_name': 'timeperiod:user-detail'}, } class ActivitySerializer(serializers.HyperlinkedModelSerializer): user = serializers.HiddenField(default=serializers.CurrentUserDefault()) class Meta: model = Activity fields = ('url', 'user', 'name', 'total', 'running') extra_kwargs = { 'url': {'view_name': 'timeperiod:activity-detail'}, 'user': {'view_name': 'timeperiod:user-detail'}, } class PeriodSerializer(serializers.HyperlinkedModelSerializer): class Meta:<|fim▁hole|> fields = ('url', 'activity', 'start', 'end', 'valid') extra_kwargs = { 'url': {'view_name': 'timeperiod:period-detail'}, 'activity': {'view_name': 'timeperiod:activity-detail'}, }<|fim▁end|>
model = Period
<|file_name|>KnowledgeBaseService.java<|end_file_name|><|fim▁begin|>package jp.ac.nii.prl.mape.controller.service; import jp.ac.nii.prl.mape.controller.model.MAPEKComponent; public interface KnowledgeBaseService { <|fim▁hole|> }<|fim▁end|>
void put(MAPEKComponent kb, String bx, String view, String param); String get(MAPEKComponent kb, String bx, String param);
<|file_name|>getOneTrustConsent.jest.ts<|end_file_name|><|fim▁begin|>import { delay } from "../delay" import { getOneTrustConsent } from "../getOneTrustConsent" import { oneTrustReady } from "../oneTrustReady" jest.mock("../delay") jest.mock("../oneTrustReady") describe("getOneTrustConsent", () => { const delayMock = delay as jest.Mock const oneTrustReadyMock = oneTrustReady as jest.Mock beforeEach(() => { delayMock.mockImplementation(() => Promise.resolve()) }) afterEach(() => { delayMock.mockRestore() oneTrustReadyMock.mockRestore() }) it("returns empty string if onetrust is never ready", async () => { oneTrustReadyMock.mockImplementation(() => { return false }) const result = await getOneTrustConsent()<|fim▁hole|> expect(delayMock).toHaveBeenCalledTimes(101) expect(oneTrustReadyMock).toHaveBeenCalledWith() expect(oneTrustReadyMock).toHaveBeenCalledTimes(103) expect(result).toBe("") }) it("returns onetrust consent string if onetrust is ready", async () => { oneTrustReadyMock.mockImplementation(() => { return true }) window.OnetrustActiveGroups = "C0001" const result = await getOneTrustConsent() expect(delayMock).not.toHaveBeenCalled() expect(oneTrustReadyMock).toHaveBeenCalledWith() expect(result).toBe("C0001") }) })<|fim▁end|>
expect(delayMock).toHaveBeenCalledWith(10)
<|file_name|>index.js<|end_file_name|><|fim▁begin|>"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.getBorderCharacters = exports.createStream = exports.table = void 0; const createStream_1 = require("./createStream");<|fim▁hole|>const table_1 = require("./table"); Object.defineProperty(exports, "table", { enumerable: true, get: function () { return table_1.table; } }); __exportStar(require("./types/api"), exports); //# sourceMappingURL=index.js.map<|fim▁end|>
Object.defineProperty(exports, "createStream", { enumerable: true, get: function () { return createStream_1.createStream; } }); const getBorderCharacters_1 = require("./getBorderCharacters"); Object.defineProperty(exports, "getBorderCharacters", { enumerable: true, get: function () { return getBorderCharacters_1.getBorderCharacters; } });
<|file_name|>m1.js<|end_file_name|><|fim▁begin|>var m1_a1 = 10; var m1_c1 = (function () { function m1_c1() { } return m1_c1; })(); <|fim▁hole|>var m1_instance1 = new m1_c1(); function m1_f1() { return m1_instance1; } //# sourceMappingURL=m1.js.map<|fim▁end|>
<|file_name|>shared.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub const EMPTY_COLUMN: Column = [0; DISPLAY_HEIGHT]; pub const DISPLAY_WIDTH: usize = 17; pub const DISPLAY_HEIGHT: usize = 7;<|fim▁end|>
pub type Column = [u8; DISPLAY_HEIGHT];
<|file_name|>msg.go<|end_file_name|><|fim▁begin|>package darwin import ( "github.com/currantlabs/ble" "github.com/raff/goble/xpc" ) type msg xpc.Dict func (m msg) id() int { return xpc.Dict(m).MustGetInt("kCBMsgId") } func (m msg) args() xpc.Dict { return xpc.Dict(m).MustGetDict("kCBMsgArgs") } func (m msg) advertisementData() xpc.Dict { return xpc.Dict(m).MustGetDict("kCBMsgArgAdvertisementData") } func (m msg) attMTU() int { return xpc.Dict(m).MustGetInt("kCBMsgArgATTMTU") } func (m msg) attWrites() xpc.Array { return xpc.Dict(m).MustGetArray("kCBMsgArgATTWrites") } func (m msg) attributeID() int { return xpc.Dict(m).MustGetInt("kCBMsgArgAttributeID") } func (m msg) characteristicHandle() int { return xpc.Dict(m).MustGetInt("kCBMsgArgCharacteristicHandle") } func (m msg) data() []byte { // return xpc.Dict(m).MustGetBytes("kCBMsgArgData") v := m["kCBMsgArgData"] switch v.(type) { case string: return []byte(v.(string)) case []byte: return v.([]byte) default: return nil } } func (m msg) deviceUUID() xpc.UUID { return xpc.Dict(m).MustGetUUID("kCBMsgArgDeviceUUID") } func (m msg) ignoreResponse() int { return xpc.Dict(m).MustGetInt("kCBMsgArgIgnoreResponse") } func (m msg) offset() int { return xpc.Dict(m).MustGetInt("kCBMsgArgOffset") } func (m msg) isNotification() int { return xpc.Dict(m).GetInt("kCBMsgArgIsNotification", 0) } func (m msg) result() int { return xpc.Dict(m).MustGetInt("kCBMsgArgResult") } func (m msg) state() int { return xpc.Dict(m).MustGetInt("kCBMsgArgState") } func (m msg) rssi() int { return xpc.Dict(m).MustGetInt("kCBMsgArgData") } func (m msg) transactionID() int { return xpc.Dict(m).MustGetInt("kCBMsgArgTransactionID") } func (m msg) uuid() string { return xpc.Dict(m).MustGetHexBytes("kCBMsgArgUUID") } func (m msg) serviceStartHandle() int { return xpc.Dict(m).MustGetInt("kCBMsgArgServiceStartHandle") } func (m msg) serviceEndHandle() int { return xpc.Dict(m).MustGetInt("kCBMsgArgServiceEndHandle") } func (m msg) services() xpc.Array { return xpc.Dict(m).MustGetArray("kCBMsgArgServices") } func (m msg) characteristics() xpc.Array { return xpc.Dict(m).MustGetArray("kCBMsgArgCharacteristics") } func (m msg) characteristicProperties() int { return xpc.Dict(m).MustGetInt("kCBMsgArgCharacteristicProperties") } func (m msg) characteristicValueHandle() int { return xpc.Dict(m).MustGetInt("kCBMsgArgCharacteristicValueHandle") } func (m msg) descriptors() xpc.Array { return xpc.Dict(m).MustGetArray("kCBMsgArgDescriptors") } func (m msg) descriptorHandle() int { return xpc.Dict(m).MustGetInt("kCBMsgArgDescriptorHandle") } func (m msg) connectionInterval() int { return xpc.Dict(m).MustGetInt("kCBMsgArgConnectionInterval") } func (m msg) connectionLatency() int { return xpc.Dict(m).MustGetInt("kCBMsgArgConnectionLatency") } func (m msg) supervisionTimeout() int { return xpc.Dict(m).MustGetInt("kCBMsgArgSupervisionTimeout") } func (m msg) err() error { if code := m.result(); code != 0 { return ble.ATTError(code)<|fim▁hole|> return nil }<|fim▁end|>
}
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
var scroller = angular.module("scroller", ["ngTouch", "angular-websql"]);
<|file_name|>drive_to_point_controller.py<|end_file_name|><|fim▁begin|>import logging import logging.config import sys import threading import os from amberclient.collision_avoidance.collision_avoidance_proxy import CollisionAvoidanceProxy from amberclient.common.amber_client import AmberClient from amberclient.location.location import LocationProxy from amberclient.roboclaw.roboclaw import RoboclawProxy from amberdriver.common.message_handler import MessageHandler from amberdriver.drive_to_point import drive_to_point_pb2 from amberdriver.drive_to_point.drive_to_point import DriveToPoint from amberdriver.tools import config __author__ = 'paoolo' pwd = os.path.dirname(os.path.abspath(__file__)) logging.config.fileConfig('%s/drive_to_point.ini' % pwd) config.add_config_ini('%s/drive_to_point.ini' % pwd) LOGGER_NAME = 'DriveToPointController' USE_COLLISION_AVOIDANCE = config.DRIVE_TO_POINT_USE_COLLISION_AVOIDANCE == 'True' class DriveToPointController(MessageHandler): def __init__(self, pipe_in, pipe_out, driver): MessageHandler.__init__(self, pipe_in, pipe_out) self.__drive_to_point = driver self.__logger = logging.getLogger(LOGGER_NAME) def handle_data_message(self, header, message): if message.HasExtension(drive_to_point_pb2.setTargets): self.__handle_set_targets(header, message) elif message.HasExtension(drive_to_point_pb2.getNextTarget): self.__handle_get_next_target(header, message) elif message.HasExtension(drive_to_point_pb2.getNextTargets): self.__handle_get_next_targets(header, message) elif message.HasExtension(drive_to_point_pb2.getVisitedTarget): self.__handle_get_visited_target(header, message) elif message.HasExtension(drive_to_point_pb2.getVisitedTargets): self.__handle_get_visited_targets(header, message) elif message.HasExtension(drive_to_point_pb2.getConfiguration): self.__handle_get_configuration(header, message) else: self.__logger.warning('No request in message') def __handle_set_targets(self, header, message): self.__logger.debug('Set targets') targets = message.Extensions[drive_to_point_pb2.targets] targets = zip(targets.longitudes, targets.latitudes, targets.radiuses) self.__drive_to_point.set_targets(targets) @MessageHandler.handle_and_response def __handle_get_next_target(self, received_header, received_message, response_header, response_message): self.__logger.debug('Get next target') next_target, current_location = self.__drive_to_point.get_next_target_and_location() targets = response_message.Extensions[drive_to_point_pb2.targets] targets.longitudes.extend([next_target[0]]) targets.latitudes.extend([next_target[1]]) targets.radiuses.extend([next_target[2]])<|fim▁hole|> location = response_message.Extensions[drive_to_point_pb2.location] location.x, location.y, location.p, location.alfa, location.timeStamp = current_location response_message.Extensions[drive_to_point_pb2.getNextTarget] = True return response_header, response_message @MessageHandler.handle_and_response def __handle_get_next_targets(self, received_header, received_message, response_header, response_message): self.__logger.debug('Get next targets') next_targets, current_location = self.__drive_to_point.get_next_targets_and_location() targets = response_message.Extensions[drive_to_point_pb2.targets] targets.longitudes.extend(map(lambda next_target: next_target[0], next_targets)) targets.latitudes.extend(map(lambda next_target: next_target[1], next_targets)) targets.radiuses.extend(map(lambda next_target: next_target[2], next_targets)) location = response_message.Extensions[drive_to_point_pb2.location] 
location.x, location.y, location.p, location.alfa, location.timeStamp = current_location response_message.Extensions[drive_to_point_pb2.getNextTargets] = True return response_header, response_message @MessageHandler.handle_and_response def __handle_get_visited_target(self, received_header, received_message, response_header, response_message): self.__logger.debug('Get visited target') visited_target, current_location = self.__drive_to_point.get_visited_target_and_location() targets = response_message.Extensions[drive_to_point_pb2.targets] targets.longitudes.extend([visited_target[0]]) targets.latitudes.extend([visited_target[1]]) targets.radiuses.extend([visited_target[2]]) location = response_message.Extensions[drive_to_point_pb2.location] location.x, location.y, location.p, location.alfa, location.timeStamp = current_location response_message.Extensions[drive_to_point_pb2.getVisitedTarget] = True return response_header, response_message @MessageHandler.handle_and_response def __handle_get_visited_targets(self, received_header, received_message, response_header, response_message): self.__logger.debug('Get visited targets') visited_targets, current_location = self.__drive_to_point.get_visited_targets_and_location() targets = response_message.Extensions[drive_to_point_pb2.targets] targets.longitudes.extend(map(lambda target: target[0], visited_targets)) targets.latitudes.extend(map(lambda target: target[1], visited_targets)) targets.radiuses.extend(map(lambda target: target[2], visited_targets)) location = response_message.Extensions[drive_to_point_pb2.location] location.x, location.y, location.p, location.alfa, location.timeStamp = current_location response_message.Extensions[drive_to_point_pb2.getVisitedTargets] = True return response_header, response_message @MessageHandler.handle_and_response def __handle_get_configuration(self, received_header, received_message, response_header, response_message): self.__logger.debug('Get configuration') configuration = response_message.Extensions[drive_to_point_pb2.configuration] configuration.maxSpeed = self.__drive_to_point.MAX_SPEED response_message.Extensions[drive_to_point_pb2.getConfiguration] = True return response_header, response_message def handle_subscribe_message(self, header, message): self.__logger.debug('Subscribe action, nothing to do...') def handle_unsubscribe_message(self, header, message): self.__logger.debug('Unsubscribe action, nothing to do...') def handle_client_died_message(self, client_id): self.__logger.info('Client %d died, stop!', client_id) self.__drive_to_point.set_targets([]) if __name__ == '__main__': client_for_location = AmberClient('127.0.0.1', name="location") client_for_driver = AmberClient('127.0.0.1', name="driver") location_proxy = LocationProxy(client_for_location, 0) if USE_COLLISION_AVOIDANCE: driver_proxy = CollisionAvoidanceProxy(client_for_driver, 0) else: driver_proxy = RoboclawProxy(client_for_driver, 0) drive_to_point = DriveToPoint(driver_proxy, location_proxy) driving_thread = threading.Thread(target=drive_to_point.driving_loop, name="driving-thread") driving_thread.start() location_thread = threading.Thread(target=drive_to_point.location_loop, name="location-thread") location_thread.start() controller = DriveToPointController(sys.stdin, sys.stdout, drive_to_point) controller()<|fim▁end|>
<|file_name|>13a57b7f084_add_user.py<|end_file_name|><|fim▁begin|>"""Add user Revision ID: 13a57b7f084 Revises: None Create Date: 2014-05-11 17:12:17.244013 """ # revision identifiers, used by Alembic. revision = '13a57b7f084' down_revision = None from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('user',<|fim▁hole|> sa.Column('email', sa.String(length=100), nullable=True), sa.Column('password_hash', sa.String(length=1000), nullable=True), sa.PrimaryKeyConstraint('id') ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_table('user') ### end Alembic commands ###<|fim▁end|>
sa.Column('id', sa.Integer(), nullable=False),
<|file_name|>HeadingsParser.py<|end_file_name|><|fim▁begin|>import re class HeadingsParser(): """ The HeadingsParser parses the document for headings. NOT YET: converts headings to raw latex headings in the correct way, so that they can be referenced later; see https://www.sharelatex.com/learn/Sections_and_chapters for info about the levels""" def __init__(self): super().__init__() self.title = None self.subtitle = None self.heading = [] # regexes<|fim▁hole|> r''' ^ # beginning of line [ ] # one whitespace [A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace (?P<title>[A-Za-z0-9äöüÄÖÜ ]+) # alphanumerical string, whitespace ok [A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace [ ] # one whitespace $ # end of line ''', re.VERBOSE|re.UNICODE ) self.subtitle_start_marker_regex = re.compile(r'[-]{3,}') self.subtitle_end_marker_regex = re.compile(r'[-]{3,}') self.subtitle_content_regex = re.compile( r''' ^ # beginning of line [ ] # one whitespace [A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace (?P<subtitle>[A-Za-z0-9äöüÄÖÜ ]+) # alphanumerical string, whitespace ok [A-Za-z0-9äöüÄÖÜ]+ # alphanumerical string, no whitespace [ ] # one whitespace $ # end of line ''', re.VERBOSE|re.UNICODE ) # Headings cannot begin with whitespace self.h_content_regex = re.compile( r''' ^ # beginning of line [A-Za-z0-9äöüÄÖÜß(] # alphanum [A-Za-z0-9äöüÄÖÜß,() -]* # alphanum or space [A-Za-z0-9äöüÄÖÜß)] # alphanum $ # end of line ''', re.VERBOSE|re.UNICODE ) # chapter self.h1_underlining_regex = re.compile(r'[=]{3,}') # section self.h2_underlining_regex = re.compile(r'[-]{3,}') # subsection self.h3_underlining_regex = re.compile(r'[~]{3,}') # subsubsection self.h4_underlining_regex = re.compile(r'[\^]{3,}') # paragraph self.h5_underlining_regex = re.compile(r'[*]{3,}') # subparagraph self.h6_underlining_regex = re.compile(r'[.]{3,}') def parse(self, rst_file_content): self.title = self.find_title(rst_file_content) self.subtitle = self.find_subtitle(rst_file_content) return self.find_heading_labels(rst_file_content) def find_title(self, rst_file_content): print('looking for title ...') title = None for lineno, line in enumerate(rst_file_content): previous_line = "" if lineno > 0: previous_line = rst_file_content[lineno - 1] next_line = "" if lineno < len(rst_file_content) - 1: next_line = rst_file_content[lineno + 1] # title if ( self.title_start_marker_regex.match(previous_line) and self.title_end_marker_regex.match(next_line) and ( len(self.title_start_marker_regex.match(previous_line).group()) == len(self.title_end_marker_regex.match(next_line).group()) ) and self.title_content_regex.match(line) and not title ): title = self.title_content_regex.match(line).group('title') print('title is:|', title, '|', sep='') break if not title: print('Could not find title in document.') return title def find_subtitle(self, rst_file_content): print('looking for subtitle ...') subtitle = None for lineno, line in enumerate(rst_file_content): previous_line = "" if lineno > 0: previous_line = rst_file_content[lineno - 1] next_line = "" if lineno < len(rst_file_content) - 1: next_line = rst_file_content[lineno + 1] if ( self.subtitle_start_marker_regex.match(previous_line) and self.subtitle_end_marker_regex.match(next_line) and ( len(self.subtitle_start_marker_regex.match(previous_line).group()) == len(self.subtitle_end_marker_regex.match(next_line).group()) ) and self.subtitle_content_regex.match(line) and not subtitle ): subtitle = self.subtitle_content_regex.match(line).group('subtitle')
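# as in find_title above, a subtitle is only accepted when the dashed marker lines above and below it have equal length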
print('subtitle is:|', subtitle, '|', sep='') break if not subtitle: print('Could not find subtitle in document.') return subtitle def find_heading_labels(self, rst_file_content): print('looking for headings ...') headings_dict = {} # heading_labels = [] for lineno, line in enumerate(rst_file_content): # print('current line:', lineno) # print('current line:', line) # if line.startswith("Schlussfolgerungen"): # print('current line:', line) previous_line = "" if lineno > 0: previous_line = rst_file_content[lineno - 1] next_line = "" if lineno < len(rst_file_content) - 1: next_line = rst_file_content[lineno + 1] # headings level 1 # print('looking for h1 ...') if ( (previous_line.isspace() or previous_line == '') and self.h_content_regex.match(line) and self.h1_underlining_regex.match(next_line) and len(self.h_content_regex.match(line).group()) == len(self.h1_underlining_regex.match(next_line).group()) ): print('found a h1:', line) print('replacing chapter heading') headings_dict[line] = self.heading_to_label(line, 'chapter') # heading_labels.append(self.heading_to_label(line, 'chapter')) rst_file_content[lineno] = ':raw-latex:`\chapter{' + line + '}`' rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'chapter') + '}`' # headings level 2 # print('looking for h2 ...') if ( (previous_line.isspace() or previous_line == '') and self.h_content_regex.match(line) and self.h2_underlining_regex.match(next_line) and len(self.h_content_regex.match(line).group()) == len(self.h2_underlining_regex.match(next_line).group()) ): print('found a h2:', line) headings_dict[line] = self.heading_to_label(line, 'section') # heading_labels.append(self.heading_to_label(line, 'section')) rst_file_content[lineno] = ':raw-latex:`\section{' + line + '}`' rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'section') + '}`' # headings level 3 # print('looking for h3 ...') if ( (previous_line.isspace() or previous_line == '') and self.h_content_regex.match(line) and self.h3_underlining_regex.match(next_line) and len(self.h_content_regex.match(line).group()) == len(self.h3_underlining_regex.match(next_line).group()) ): print('found a h3:', line) # heading_labels.append(self.heading_to_label(line, 'subsection')) headings_dict[line] = self.heading_to_label(line, 'subsection') rst_file_content[lineno] = ':raw-latex:`\subsection{' + line + '}`' rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'subsection') + '}`' # headings level 4 # print('looking for h4 ...') if ( (previous_line.isspace() or previous_line == '') and self.h_content_regex.match(line) and self.h4_underlining_regex.match(next_line) and len(self.h_content_regex.match(line).group()) == len(self.h4_underlining_regex.match(next_line).group()) ): print('found a h4:', line) # heading_labels.append(self.heading_to_label(line, 'subsubsection')) headings_dict[line] = self.heading_to_label(line, 'subsubsection') rst_file_content[lineno] = ':raw-latex:`\subsubsection{' + line + '}`' rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'subsubsection') + '}`' # headings level 5 # print('looking for h5 ...') if ( (previous_line.isspace() or previous_line == '') and self.h_content_regex.match(line) and self.h5_underlining_regex.match(next_line) and len(self.h_content_regex.match(line).group()) == len(self.h5_underlining_regex.match(next_line).group()) ): print('found a h5:', line) # heading_labels.append(self.heading_to_label(line, 'paragraph')) 
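# heading_to_label (defined below) builds the stored label; e.g. an assumed heading 'Results (Part 1)' at this level would become 'paragraph:results-part-1'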
headings_dict[line] = self.heading_to_label(line, 'paragraph') rst_file_content[lineno] = ':raw-latex:`\paragraph{' + line + '}`' rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'paragraph') + '}`' # headings level 6 # print('looking for h6 ...') if ( (previous_line.isspace() or previous_line == '') and self.h_content_regex.match(line) and self.h6_underlining_regex.match(next_line) and len(self.h_content_regex.match(line).group()) == len(self.h6_underlining_regex.match(next_line).group()) ): print('found a h6:', line) # heading_labels.append(self.heading_to_label(line, 'subparagraph')) headings_dict[line] = self.heading_to_label(line, 'subparagraph') rst_file_content[lineno] = ':raw-latex:`\subparagraph{' + line + '}`' rst_file_content[lineno + 1] = ':raw-latex:`\label{' + self.heading_to_label(line, 'subparagraph') + '}`' return headings_dict def heading_to_label(self, heading_text, level): heading_text = heading_text.lower() replaced_chars = { ' ': '-', '(': '', ')': '' } for key,value in replaced_chars.items(): heading_text = heading_text.replace(key, value) return '{0}:{1}'.format(level, heading_text) # self.chapter_delimiter_regex = re.compile(r'={3,}') # ============= # self.section_delimiter_regex = re.compile(r'-{3,}') # ------------- # self.subsection_delimiter_regex = re.compile(r'~{3,}') # ~~~~~~~~~~~~~ # self.subsubsection_delimiter_regex = re.compile(r'\^{3,}') # ^^^^^^^^^^^^^ # self.heading_text_regex = re.compile( # r''' # ^ # \s* # (?P<title_text> # [a-zA-Z0-9] # [a-zA-Z0-9_ -]* # [a-zA-Z0-9] # ) # \s* # $''', # re.VERBOSE) # self.heading_keys = [] # def parse_headings(self, rst_file_content): # for lineno, line in enumerate(rst_file_content): # # # search for title # if self.title_delimiter_regex.search(line) is not None: # if (lineno >= 2): # if ( # self.title_delimiter_regex.search(rst_file_content[lineno - 2]) is not None and # self.heading_text_regex.search(rst_file_content[lineno - 1]) is not None # ): # title_text = self.heading_text_regex.findall(rst_file_content[lineno - 1])[0].strip() # self.heading_keys.append(re.sub('\s+', '-', title_text.lower())) # print('[DEBUG:HEADINGS]', self.heading_keys) # print('[DEBUG:HEADINGS] !!! found a title in the document:', title_text, sep='') # # # TODO: elif subtitle<|fim▁end|>
self.title_start_marker_regex = re.compile(r'[=]{3,}') self.title_end_marker_regex = re.compile(r'[=]{3,}') self.title_content_regex = re.compile(
<|file_name|>testMessagePartHitInfo.java<|end_file_name|><|fim▁begin|>package generated.zcsclient.mail; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlType; <|fim▁hole|> * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="messagePartHitInfo"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="e" type="{urn:zimbraMail}emailInfo" minOccurs="0"/> * &lt;element name="su" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;/sequence> * &lt;attribute name="id" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="sf" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="s" type="{http://www.w3.org/2001/XMLSchema}long" /> * &lt;attribute name="d" type="{http://www.w3.org/2001/XMLSchema}long" /> * &lt;attribute name="cid" type="{http://www.w3.org/2001/XMLSchema}int" /> * &lt;attribute name="mid" type="{http://www.w3.org/2001/XMLSchema}int" /> * &lt;attribute name="ct" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="name" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="part" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "messagePartHitInfo", propOrder = { "e", "su" }) public class testMessagePartHitInfo { protected testEmailInfo e; protected String su; @XmlAttribute(name = "id") protected String id; @XmlAttribute(name = "sf") protected String sf; @XmlAttribute(name = "s") protected Long s; @XmlAttribute(name = "d") protected Long d; @XmlAttribute(name = "cid") protected Integer cid; @XmlAttribute(name = "mid") protected Integer mid; @XmlAttribute(name = "ct") protected String ct; @XmlAttribute(name = "name") protected String name; @XmlAttribute(name = "part") protected String part; /** * Gets the value of the e property. * * @return * possible object is * {@link testEmailInfo } * */ public testEmailInfo getE() { return e; } /** * Sets the value of the e property. * * @param value * allowed object is * {@link testEmailInfo } * */ public void setE(testEmailInfo value) { this.e = value; } /** * Gets the value of the su property. * * @return * possible object is * {@link String } * */ public String getSu() { return su; } /** * Sets the value of the su property. * * @param value * allowed object is * {@link String } * */ public void setSu(String value) { this.su = value; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the sf property. * * @return * possible object is * {@link String } * */ public String getSf() { return sf; } /** * Sets the value of the sf property. * * @param value * allowed object is * {@link String } * */ public void setSf(String value) { this.sf = value; } /** * Gets the value of the s property. * * @return * possible object is * {@link Long } * */ public Long getS() { return s; } /** * Sets the value of the s property. 
* * @param value * allowed object is * {@link Long } * */ public void setS(Long value) { this.s = value; } /** * Gets the value of the d property. * * @return * possible object is * {@link Long } * */ public Long getD() { return d; } /** * Sets the value of the d property. * * @param value * allowed object is * {@link Long } * */ public void setD(Long value) { this.d = value; } /** * Gets the value of the cid property. * * @return * possible object is * {@link Integer } * */ public Integer getCid() { return cid; } /** * Sets the value of the cid property. * * @param value * allowed object is * {@link Integer } * */ public void setCid(Integer value) { this.cid = value; } /** * Gets the value of the mid property. * * @return * possible object is * {@link Integer } * */ public Integer getMid() { return mid; } /** * Sets the value of the mid property. * * @param value * allowed object is * {@link Integer } * */ public void setMid(Integer value) { this.mid = value; } /** * Gets the value of the ct property. * * @return * possible object is * {@link String } * */ public String getCt() { return ct; } /** * Sets the value of the ct property. * * @param value * allowed object is * {@link String } * */ public void setCt(String value) { this.ct = value; } /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets the value of the part property. * * @return * possible object is * {@link String } * */ public String getPart() { return part; } /** * Sets the value of the part property. * * @param value * allowed object is * {@link String } * */ public void setPart(String value) { this.part = value; } }<|fim▁end|>
/** * <p>Java class for messagePartHitInfo complex type. *
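An aside for orientation, not part of the dataset row above: a generated JAXB binding like testMessagePartHitInfo is normally populated by an unmarshaller and read through its getters. The sketch below round-trips one instance by hand. The attribute values are made up, and because the class carries @XmlType but no @XmlRootElement, it is wrapped in a JAXBElement whose element name ("mp" in the urn:zimbraMail namespace) is only illustrative.

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.Marshaller;
import javax.xml.namespace.QName;
import generated.zcsclient.mail.testMessagePartHitInfo;

public class MessagePartHitInfoDemo {
    public static void main(String[] args) throws Exception {
        // Populate a hit the way an unmarshalled SOAP response would look.
        testMessagePartHitInfo hit = new testMessagePartHitInfo();
        hit.setId("257");        // "id" attribute
        hit.setS(1024L);         // "s" (size) attribute
        hit.setCt("text/plain"); // "ct" (content type) attribute
        hit.setPart("1");        // "part" attribute

        // No @XmlRootElement on the type, so wrap it to marshal standalone.
        JAXBContext ctx = JAXBContext.newInstance(testMessagePartHitInfo.class);
        JAXBElement<testMessagePartHitInfo> root = new JAXBElement<>(
                new QName("urn:zimbraMail", "mp"), testMessagePartHitInfo.class, hit);
        Marshaller m = ctx.createMarshaller();
        m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
        m.marshal(root, System.out);
    }
}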
<|file_name|>EidMap.java<|end_file_name|><|fim▁begin|>/** Copyright (c) 2011, Cisco Systems, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the Cisco Systems, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.cisco.qte.jdtn.bp; import java.io.IOException; import java.io.PrintWriter; import java.util.HashMap; import java.util.logging.Level; import java.util.logging.Logger; import com.cisco.qte.jdtn.component.AbstractStartableComponent; import com.cisco.qte.jdtn.general.GeneralManagement; import com.cisco.qte.jdtn.general.JDtnException; import com.cisco.qte.jdtn.general.Utils; import com.cisco.qte.jdtn.general.XmlRDParser; import com.cisco.qte.jdtn.general.XmlRdParserException; /** * Mappings from IPN: to DTN: EndPointIds. Maintains two-way mappings * (equivalences) between a set of IPN: EndPointId and DTN: EndPointIds. */ public class EidMap extends AbstractStartableComponent { private static final Logger _logger = Logger.getLogger(EidMap.class.getCanonicalName()); private static EidMap _instance = null; private HashMap<EndPointId, IpnEndpointId> _dtnToIpnMap = new HashMap<EndPointId, IpnEndpointId>(); private HashMap<IpnEndpointId, EndPointId> _ipnToDtnMap = new HashMap<IpnEndpointId, EndPointId>(); /** * Get singleton Instance * @return Singleton instance */ public static EidMap getInstance() { if (_instance == null) { _instance = new EidMap(); } return _instance; } /** * Protected access constructor */ protected EidMap() { super("EidMap"); if (GeneralManagement.isDebugLogging()) { _logger.finer("EidMap()"); } } /** * Start this component */ @Override protected void startImpl() { if (GeneralManagement.isDebugLogging()) { _logger.finer("startImpl()"); } addDefaultMapping(); } /** * Stop this component */ @Override protected void stopImpl() { if (GeneralManagement.isDebugLogging()) { _logger.finer("stopImpl()"); } removeDefaultMapping(); } /** * Set to default state; clears all mappings */ public void setDefaults() { if (GeneralManagement.isDebugLogging()) { _logger.finer("setDefaults()"); } _dtnToIpnMap.clear(); _ipnToDtnMap.clear(); addDefaultMapping(); } /** * Parse from config file. It is assumed that the parser is sitting on the * &lt; EidMap &gt; element.
We parse all contained &lt; EidMapEntry &gt;<|fim▁hole|> * @throws XmlRdParserException on general parsing errors * @throws IOException On general I/O errors * @throws JDtnException on JDTN specific errors */ public void parse(XmlRDParser parser) throws XmlRdParserException, IOException, JDtnException { if (GeneralManagement.isDebugLogging()) { _logger.finer("parse()"); } // General structure of EidMap info: // <EidMap> // <EidMapEntry dtnEid='dtnEid' ipnEid='ipnEid' /> // ... // </EidMap> // Parse each <EidMapEntry> XmlRDParser.EventType event = Utils.nextNonTextEvent(parser); while (event == XmlRDParser.EventType.START_ELEMENT) { if (!parser.getElementTag().equals("EidMapEntry")) { throw new BPException("Expecting <EidMapEntry>"); } // Get 'dtnEid' attribute String dtnEidStr = Utils.getStringAttribute(parser, "dtnEid"); if (dtnEidStr == null) { throw new BPException("Missing attribute 'dtnEid'"); } EndPointId dtnEid = EndPointId.createEndPointId(dtnEidStr); if (!dtnEid.getScheme().equals(EndPointId.DEFAULT_SCHEME)) { throw new BPException("First argument not 'dtn' Eid"); } // Get 'ipnEid' attribute String ipnEidStr = Utils.getStringAttribute(parser, "ipnEid"); if (ipnEidStr == null) { throw new BPException("Missing attribute 'ipnEid'"); } EndPointId ipnEid = EndPointId.createEndPointId(ipnEidStr); if (!ipnEid.getScheme().equals(IpnEndpointId.SCHEME_NAME) || !(ipnEid instanceof IpnEndpointId)) { throw new BPException("Second argument not 'ipn' Eid"); } // Add the mapping addMapping(dtnEid, (IpnEndpointId)ipnEid); // Parse </EidMapEntry> event = Utils.nextNonTextEvent(parser); if (event != XmlRDParser.EventType.END_ELEMENT || !parser.getElementTag().equals("EidMapEntry")) { throw new BPException("Expecting </EidMapEntry>"); } event = Utils.nextNonTextEvent(parser); } // Parse </EidMap> if (event != XmlRDParser.EventType.END_ELEMENT || !parser.getElementTag().equals("EidMap")) { throw new JDtnException("Expecting '</EidMap>'"); } } /** * Write EidMap to config file. We only do this if there are entries * in the map.
* @param pw PrintWriter to output to */ public void writeConfig(PrintWriter pw) { if (GeneralManagement.isDebugLogging()) { _logger.finer("writeConfig()"); } if (EidMap.getInstance().size() > 0) { pw.println(" <EidMap>"); for (EndPointId dtnEid : _dtnToIpnMap.keySet()) { IpnEndpointId ipnEid = _dtnToIpnMap.get(dtnEid); if (!isDefaultMapping(dtnEid, ipnEid)) { pw.println(" <EidMapEntry"); pw.println(" dtnEid='" + dtnEid.getEndPointIdString() + "'"); pw.println(" ipnEid='" + ipnEid.getEndPointIdString() + "'"); pw.println(" />"); } } pw.println(" </EidMap>"); } } // Add an entry to map 'dtn:none' to 'ipn:0.0' private void addDefaultMapping() { if (GeneralManagement.isDebugLogging()) { _logger.finer("addDefaultMapping()"); } try { addMapping( EndPointId.DEFAULT_ENDPOINT_ID_STRING, IpnEndpointId.DEFAULT_IPNEID_STR); } catch (BPException e) { _logger.log(Level.SEVERE, "EidMap default mapping", e); } } // Remove entry mapping 'dtn:none' to 'ipn:0.0' private void removeDefaultMapping() { if (GeneralManagement.isDebugLogging()) { _logger.finer("removeDefaultMapping()"); } try { removeMapping(EndPointId.DEFAULT_ENDPOINT_ID_STRING); } catch (BPException e) { _logger.log(Level.SEVERE, "EidMap default mapping", e); } } // Determine if given mapping is 'dtn:none' <=> 'ipn:0.0' private boolean isDefaultMapping(EndPointId dtnEid, IpnEndpointId ipnEid) { if (dtnEid.getEndPointIdString().equalsIgnoreCase(EndPointId.DEFAULT_ENDPOINT_ID_STRING) && ipnEid.getEndPointIdString().equalsIgnoreCase(IpnEndpointId.DEFAULT_IPNEID_STR)) { return true; } return false; } /** * Add a mapping between a 'dtn' Eid and an 'ipn' Eid * @param dtnEidStr String containing the 'dtn' Eid * @param ipnEidStr String containing the 'ipn' Eid * @throws BPException if there is already a mapping for dtnEid <=> ipnEid, * or if dtnEidStr is not a valid 'dtn' scheme EndPointId, * or if ipnEidStr is not a valid 'ipn' scheme EndPointId. */ public void addMapping(String dtnEidStr, String ipnEidStr) throws BPException { if (GeneralManagement.isDebugLogging()) { _logger.finer("addMapping(<String>" + dtnEidStr + " <=> " + ipnEidStr + ")"); } EndPointId dtnEid = EndPointId.createEndPointId(dtnEidStr); IpnEndpointId ipnEid = new IpnEndpointId(ipnEidStr); addMapping(dtnEid, ipnEid); } /** * Add a mapping between a 'dtn' Eid and an 'ipn' Eid * @param dtnEid The 'dtn' Eid * @param ipnEid The 'ipn' Eid * @throws BPException if there is already a mapping for dtnEid <=> ipnEid, * or if dtnEid is not a 'dtn' scheme EndPointId.
*/ public synchronized void addMapping(EndPointId dtnEid, IpnEndpointId ipnEid) throws BPException { if (GeneralManagement.isDebugLogging()) { _logger.finer("addMapping(" + dtnEid.getEndPointIdString() + " <=> " + ipnEid.getEndPointIdString() + ")"); } if (!dtnEid.getScheme().equals(EndPointId.DEFAULT_SCHEME)) { throw new BPException("First argument is not a 'dtn' EndPointId"); } if (_dtnToIpnMap.containsKey(dtnEid)) { if (_ipnToDtnMap.containsKey(ipnEid)) { // Full Mapping already exists; silently ignore return; } if (GeneralManagement.isDebugLogging()) { _logger.finer("addMapping(" + dtnEid.getEndPointIdString() + " <=> " + ipnEid.getEndPointIdString() + ") Entry already exists"); _logger.finest(dump("", true)); } throw new BPException("There is already a mapping for DTN EID: " + dtnEid.getEndPointIdString()); } if (_ipnToDtnMap.containsKey(ipnEid)) { throw new BPException("There is already a mapping for IPN EID: " + ipnEid.getEndPointIdString()); } _dtnToIpnMap.put(dtnEid, ipnEid); _ipnToDtnMap.put(ipnEid, dtnEid); } /** * Remove a mapping between a 'dtn' Eid and an 'ipn' Eid * @param dtnEidStr The 'dtn' Eid String * @throws BPException If no mapping, or dtnEidStr poorly formatted */ public synchronized void removeMapping(String dtnEidStr) throws BPException { if (GeneralManagement.isDebugLogging()) { _logger.finer("removeMapping(" + dtnEidStr + ")"); } EndPointId dtnEid = EndPointId.createEndPointId(dtnEidStr); IpnEndpointId ipnEid = getIpnEid(dtnEid); if (ipnEid == null) { throw new BPException("No mapping for " + dtnEid.getEndPointIdString()); } removeMapping(dtnEid, ipnEid); } /** * Remove a mapping between a 'dtn' Eid and an 'ipn' Eid * @param dtnEid The 'dtn' Eid * @param ipnEid The 'ipn' Eid * @throws BPException if there is not a mapping for dtnEid <=> ipnEid, * or if dtnEid is not a 'dtn' scheme EndPointId. 
*/ public synchronized void removeMapping(EndPointId dtnEid, IpnEndpointId ipnEid) throws BPException { if (GeneralManagement.isDebugLogging()) { _logger.finer("removeMapping(" + dtnEid.getEndPointIdString() + " <=> " + ipnEid.getEndPointIdString() + ")"); } if (!_dtnToIpnMap.containsKey(dtnEid)) { throw new BPException("There is not a mapping for DTN EID: " + dtnEid.getEndPointIdString()); } if (!_ipnToDtnMap.containsKey(ipnEid)) { throw new BPException("There is not a mapping for IPN EID: " + ipnEid.getEndPointIdString()); } _dtnToIpnMap.remove(dtnEid); _ipnToDtnMap.remove(ipnEid); } /** * Dump this object * @param indent Amount of indentation * @param detailed if want detailed dump * @return String containing dump */ @Override public synchronized String dump(String indent, boolean detailed) { StringBuilder sb = new StringBuilder(indent + "EidMap\n"); for (EndPointId dtnEid : _dtnToIpnMap.keySet()) { sb.append( indent + " DtnEid=" + dtnEid.getEndPointIdString() + " <=> IpnEid=" + _dtnToIpnMap.get(dtnEid).getEndPointIdString() + "\n"); } return sb.toString(); } /** * Get the IPN Eid mapped to given DTN Eid * @param dtnEidStr Given DTN Eid String * @return Mapped IPN Eid or null if none mapped * @throws BPException if dtnEidStr is poorly formed */ public String getIpnEidStr(String dtnEidStr) throws BPException { if (GeneralManagement.isDebugLogging()) { _logger.finer("getIpnEidStr(" + dtnEidStr + ")"); } EndPointId dtnEid = EndPointId.createEndPointId(dtnEidStr); IpnEndpointId ipnEid = getIpnEid(dtnEid); if (ipnEid == null) { if (GeneralManagement.isDebugLogging()) { _logger.finer("getIpnEidStr(" + dtnEidStr + ") = null"); } return null; } if (GeneralManagement.isDebugLogging()) { _logger.finer("getIpnEidStr(" + dtnEidStr + ") = " + ipnEid.getEndPointIdString()); } return ipnEid.getEndPointIdString(); } /** * Get the IPN Eid mapped to given DTN Eid * @param dtnEid Given DTN Eid * @return Mapped IPN Eid or null if none mapped */ public synchronized IpnEndpointId getIpnEid(EndPointId dtnEid) { IpnEndpointId ipnEid = _dtnToIpnMap.get(dtnEid); if (ipnEid == null) { if (GeneralManagement.isDebugLogging()) { _logger.finer("getIpnEid(" + dtnEid.getEndPointIdString() + ") = null"); } return null; } if (GeneralManagement.isDebugLogging()) { _logger.finer("getIpnEid(" + dtnEid.getEndPointIdString() + ") = " + ipnEid.getEndPointIdString()); } return ipnEid; } /** * Get the DTN Eid mapped to given IPN Eid * @param ipnEidStr Given IPN Eid String * @return Mapped DTN Eid String or null if none mapped * @throws BPException if ipnEidStr is poorly formed */ public String getDtnEidStr(String ipnEidStr) throws BPException { IpnEndpointId ipnEid = new IpnEndpointId(ipnEidStr); EndPointId dtnEid = getDtnEid(ipnEid); if (dtnEid == null) { if (GeneralManagement.isDebugLogging()) { _logger.finer("getDtnEidStr(" + ipnEidStr + ") = null"); } return null; } if (GeneralManagement.isDebugLogging()) { _logger.finer("getDtnEidStr(" + ipnEidStr + ") = " + dtnEid.getEndPointIdString()); } return dtnEid.getEndPointIdString(); } /** * Get the DTN Eid mapped to given IPN Eid * @param ipnEid Given IPN Eid * @return Mapped DTN Eid or null if none mapped */ public synchronized EndPointId getDtnEid(IpnEndpointId ipnEid) { EndPointId dtnEid = _ipnToDtnMap.get(ipnEid); if (dtnEid == null) { if (GeneralManagement.isDebugLogging()) { _logger.finer("getDtnEid(" + ipnEid.getEndPointIdString() + ") = null"); } return null; } if (GeneralManagement.isDebugLogging()) { _logger.finer("getDtnEid(" + ipnEid.getEndPointIdString() + ") = " +
dtnEid.getEndPointIdString()); } return dtnEid; } /** * Get the number of mappings * @return Number of mappings */ public int size() { return _dtnToIpnMap.size(); } }<|fim▁end|>
* sub-elements, adding a Dtn <-> Ipn EID Mapping for each. We also * parse the ending &lt; /EidMap &gt; tag. * @param parser The config file parser
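A short usage sketch of the EidMap API from the row above. It assumes "dtn:node1" and "ipn:1.0" are acceptable to the EndPointId and IpnEndpointId parsers, and that BPException lives in the same com.cisco.qte.jdtn.bp package (it is referenced unqualified in EidMap); neither class is shown in this excerpt.

import com.cisco.qte.jdtn.bp.BPException;
import com.cisco.qte.jdtn.bp.EidMap;

public class EidMapDemo {
    public static void main(String[] args) throws BPException {
        EidMap map = EidMap.getInstance();

        // One call registers a two-way equivalence; both lookups hit it.
        map.addMapping("dtn:node1", "ipn:1.0");
        System.out.println(map.getIpnEidStr("dtn:node1")); // expected: ipn:1.0
        System.out.println(map.getDtnEidStr("ipn:1.0"));   // expected: dtn:node1

        // Each side of a mapping must be unique, so this conflicts and throws.
        try {
            map.addMapping("dtn:node1", "ipn:2.0");
        } catch (BPException expected) {
            System.out.println("rejected: " + expected.getMessage());
        }

        map.removeMapping("dtn:node1");
    }
}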
<|file_name|>table_rowgroup.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! CSS table formatting contexts. #![deny(unsafe_code)] use app_units::Au; use block::{BlockFlow, ISizeAndMarginsComputer}; use context::LayoutContext; use display_list_builder::DisplayListBuildState; use euclid::Point2D; use flow::{Flow, FlowClass, OpaqueFlow}; use fragment::{Fragment, FragmentBorderBoxIterator, Overflow}; use gfx::display_list::StackingContext; use gfx_traits::StackingContextId; use layout_debug; use rustc_serialize::{Encodable, Encoder}; use std::fmt; use std::iter::{IntoIterator, Iterator, Peekable}; use std::sync::Arc; use style::computed_values::{border_collapse, border_spacing}; use style::logical_geometry::{LogicalSize, WritingMode}; use style::properties::{ComputedValues, ServoComputedValues}; use style::servo::SharedStyleContext; use table::{ColumnComputedInlineSize, ColumnIntrinsicInlineSize, InternalTable, TableLikeFlow}; use table_row; use util::print_tree::PrintTree; /// A table rowgroup formatting context. pub struct TableRowGroupFlow { /// Fields common to all block flows. pub block_flow: BlockFlow, /// Information about the intrinsic inline-sizes of each column. pub column_intrinsic_inline_sizes: Vec<ColumnIntrinsicInlineSize>, /// Information about the actual inline sizes of each column. pub column_computed_inline_sizes: Vec<ColumnComputedInlineSize>, /// The spacing for this rowgroup. pub spacing: border_spacing::T, /// The direction of the columns, propagated down from the table during the inline-size /// assignment phase. pub table_writing_mode: WritingMode, /// The final width of the borders in the inline direction for each cell, computed by the /// entire table and pushed down into each row during inline size computation. pub collapsed_inline_direction_border_widths_for_table: Vec<Au>, /// The final width of the borders in the block direction for each cell, computed by the /// entire table and pushed down into each row during inline size computation.
pub collapsed_block_direction_border_widths_for_table: Vec<Au>, } impl Encodable for TableRowGroupFlow { fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> { self.block_flow.encode(e) } } impl TableRowGroupFlow { pub fn from_fragment(fragment: Fragment) -> TableRowGroupFlow { let writing_mode = fragment.style().writing_mode; TableRowGroupFlow { block_flow: BlockFlow::from_fragment(fragment, None), column_intrinsic_inline_sizes: Vec::new(), column_computed_inline_sizes: Vec::new(), spacing: border_spacing::T { horizontal: Au(0), vertical: Au(0), }, table_writing_mode: writing_mode, collapsed_inline_direction_border_widths_for_table: Vec::new(), collapsed_block_direction_border_widths_for_table: Vec::new(), } } pub fn populate_collapsed_border_spacing<'a, I>( &mut self, collapsed_inline_direction_border_widths_for_table: &[Au], collapsed_block_direction_border_widths_for_table: &mut Peekable<I>) where I: Iterator<Item=&'a Au> { self.collapsed_inline_direction_border_widths_for_table.clear(); self.collapsed_inline_direction_border_widths_for_table .extend(collapsed_inline_direction_border_widths_for_table.into_iter().map(|x| *x)); for _ in 0..self.block_flow.base.children.len() { if let Some(collapsed_block_direction_border_width_for_table) = collapsed_block_direction_border_widths_for_table.next() { self.collapsed_block_direction_border_widths_for_table .push(*collapsed_block_direction_border_width_for_table) } } if let Some(collapsed_block_direction_border_width_for_table) = collapsed_block_direction_border_widths_for_table.peek() { self.collapsed_block_direction_border_widths_for_table .push(**collapsed_block_direction_border_width_for_table) } } } impl Flow for TableRowGroupFlow { fn class(&self) -> FlowClass { FlowClass::TableRowGroup } fn as_mut_table_rowgroup(&mut self) -> &mut TableRowGroupFlow { self } fn as_table_rowgroup(&self) -> &TableRowGroupFlow { self } fn as_mut_block(&mut self) -> &mut BlockFlow { &mut self.block_flow } fn as_block(&self) -> &BlockFlow { &self.block_flow } fn column_intrinsic_inline_sizes(&mut self) -> &mut Vec<ColumnIntrinsicInlineSize> { &mut self.column_intrinsic_inline_sizes } fn column_computed_inline_sizes(&mut self) -> &mut Vec<ColumnComputedInlineSize> { &mut self.column_computed_inline_sizes } fn bubble_inline_sizes(&mut self) { let _scope = layout_debug_scope!("table_rowgroup::bubble_inline_sizes {:x}", self.block_flow.base.debug_id()); // Proper calculation of intrinsic sizes in table layout requires access to the entire // table, which we don't have yet. Defer to our parent. } /// Recursively (top-down) determines the actual inline-size of child contexts and fragments. /// When called on this context, the context has had its inline-size set by the parent context. fn assign_inline_sizes(&mut self, shared_context: &SharedStyleContext) { let _scope = layout_debug_scope!("table_rowgroup::assign_inline_sizes {:x}", self.block_flow.base.debug_id()); debug!("assign_inline_sizes({}): assigning inline_size for flow", "table_rowgroup"); // The position was set to the containing block by the flow's parent. 
let containing_block_inline_size = self.block_flow.base.block_container_inline_size; let (inline_start_content_edge, inline_end_content_edge) = (Au(0), Au(0)); let content_inline_size = containing_block_inline_size; let border_collapse = self.block_flow.fragment.style.get_inheritedtable().border_collapse; let inline_size_computer = InternalTable { border_collapse: border_collapse, }; inline_size_computer.compute_used_inline_size(&mut self.block_flow, shared_context, containing_block_inline_size); let column_computed_inline_sizes = &self.column_computed_inline_sizes; let border_spacing = self.spacing; let table_writing_mode = self.table_writing_mode; let collapsed_inline_direction_border_widths_for_table = &self.collapsed_inline_direction_border_widths_for_table; let mut collapsed_block_direction_border_widths_for_table = self.collapsed_block_direction_border_widths_for_table.iter().peekable(); self.block_flow.propagate_assigned_inline_size_to_children(shared_context, inline_start_content_edge, inline_end_content_edge, content_inline_size, |child_flow, _child_index, _content_inline_size, _writing_mode, _inline_start_margin_edge, _inline_end_margin_edge| { table_row::propagate_column_inline_sizes_to_child( child_flow, table_writing_mode, column_computed_inline_sizes, &border_spacing); if border_collapse == border_collapse::T::collapse { let child_table_row = child_flow.as_mut_table_row(); child_table_row.populate_collapsed_border_spacing( collapsed_inline_direction_border_widths_for_table, &mut collapsed_block_direction_border_widths_for_table); } }); } fn assign_block_size<'a>(&mut self, _: &'a LayoutContext<'a>) { debug!("assign_block_size: assigning block_size for table_rowgroup"); self.block_flow.assign_block_size_for_table_like_flow(self.spacing.vertical) } fn compute_absolute_position(&mut self, layout_context: &LayoutContext) { self.block_flow.compute_absolute_position(layout_context) } fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) { self.block_flow.update_late_computed_inline_position_if_necessary(inline_position) } fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) { self.block_flow.update_late_computed_block_position_if_necessary(block_position) } fn build_display_list(&mut self, state: &mut DisplayListBuildState) { debug!("build_display_list_table_rowgroup: same process as block flow"); self.block_flow.build_display_list(state); } fn collect_stacking_contexts(&mut self, parent_id: StackingContextId, contexts: &mut Vec<Box<StackingContext>>) -> StackingContextId { self.block_flow.collect_stacking_contexts(parent_id, contexts) } fn repair_style(&mut self, new_style: &Arc<ServoComputedValues>) { self.block_flow.repair_style(new_style) } fn compute_overflow(&self) -> Overflow { self.block_flow.compute_overflow() } fn generated_containing_block_size(&self, flow: OpaqueFlow) -> LogicalSize<Au> {<|fim▁hole|> fn iterate_through_fragment_border_boxes(&self, iterator: &mut FragmentBorderBoxIterator, level: i32, stacking_context_position: &Point2D<Au>) { self.block_flow.iterate_through_fragment_border_boxes(iterator, level, stacking_context_position) } fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) { self.block_flow.mutate_fragments(mutator) } fn print_extra_flow_children(&self, print_tree: &mut PrintTree) { self.block_flow.print_extra_flow_children(print_tree); } } impl fmt::Debug for TableRowGroupFlow { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "TableRowGroupFlow: {:?}", 
self.block_flow) } }<|fim▁end|>
self.block_flow.generated_containing_block_size(flow) }
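A note on populate_collapsed_border_spacing in the row above: with collapsed borders, adjacent rows share an edge, so N rows need N+1 block-direction widths, and the last width a rowgroup copies is only peeked, never consumed, so the next rowgroup sees the shared border too. The sketch below restates that pairing in Java (used for all examples in this section); the width values are invented.

import java.util.ArrayList;
import java.util.List;

public class CollapsedBorderPairing {
    private final List<Integer> tableWidths; // table-wide block-direction border widths
    private int cursor = 0;                  // stands in for the shared Peekable iterator

    CollapsedBorderPairing(List<Integer> tableWidths) { this.tableWidths = tableWidths; }

    // N rows pull N widths via next(); the (N+1)th is only peeked, so the
    // border shared with the following rowgroup stays available to it.
    List<Integer> forRowGroup(int rowCount) {
        List<Integer> group = new ArrayList<>();
        for (int i = 0; i < rowCount && cursor < tableWidths.size(); i++) {
            group.add(tableWidths.get(cursor++)); // next()
        }
        if (cursor < tableWidths.size()) {
            group.add(tableWidths.get(cursor));   // peek(): not consumed
        }
        return group;
    }

    public static void main(String[] args) {
        CollapsedBorderPairing p = new CollapsedBorderPairing(List.of(2, 4, 4, 2));
        System.out.println(p.forRowGroup(2)); // [2, 4, 4]
        System.out.println(p.forRowGroup(1)); // [4, 2] -- the 4 is the shared edge
    }
}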
<|file_name|>data_source_aws_network_acls.go<|end_file_name|><|fim▁begin|>package aws import ( "errors" "fmt"<|fim▁hole|> "github.com/hashicorp/terraform/helper/resource" "github.com/hashicorp/terraform/helper/schema" ) func dataSourceAwsNetworkAcls() *schema.Resource { return &schema.Resource{ Read: dataSourceAwsNetworkAclsRead, Schema: map[string]*schema.Schema{ "filter": ec2CustomFiltersSchema(), "tags": tagsSchemaComputed(), "vpc_id": { Type: schema.TypeString, Optional: true, }, "ids": { Type: schema.TypeSet, Computed: true, Elem: &schema.Schema{Type: schema.TypeString}, Set: schema.HashString, }, }, } } func dataSourceAwsNetworkAclsRead(d *schema.ResourceData, meta interface{}) error { conn := meta.(*AWSClient).ec2conn req := &ec2.DescribeNetworkAclsInput{} if v, ok := d.GetOk("vpc_id"); ok { req.Filters = buildEC2AttributeFilterList( map[string]string{ "vpc-id": v.(string), }, ) } filters, filtersOk := d.GetOk("filter") tags, tagsOk := d.GetOk("tags") if tagsOk { req.Filters = append(req.Filters, buildEC2TagFilterList( tagsFromMap(tags.(map[string]interface{})), )...) } if filtersOk { req.Filters = append(req.Filters, buildEC2CustomFilterList( filters.(*schema.Set), )...) } if len(req.Filters) == 0 { // Don't send an empty filters list; the EC2 API won't accept it. req.Filters = nil } log.Printf("[DEBUG] DescribeNetworkAcls %s\n", req) resp, err := conn.DescribeNetworkAcls(req) if err != nil { return err } if resp == nil || len(resp.NetworkAcls) == 0 { return errors.New("no matching network ACLs found") } networkAcls := make([]string, 0) for _, networkAcl := range resp.NetworkAcls { networkAcls = append(networkAcls, aws.StringValue(networkAcl.NetworkAclId)) } d.SetId(resource.UniqueId()) if err := d.Set("ids", networkAcls); err != nil { return fmt.Errorf("Error setting network ACL ids: %s", err) } return nil }<|fim▁end|>
"log" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/ec2"
<|file_name|>FunctionalAnalysisArchitectureImpl.java<|end_file_name|><|fim▁begin|>/** * <copyright> * </copyright> * * $Id$ */ package org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl; import java.util.Collection; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.EObjectContainmentWithInverseEList; import org.eclipse.emf.ecore.util.InternalEList; import org.obeonetwork.dsl.east_adl.core.impl.EASTADLArtifactImpl; import org.obeonetwork.dsl.east_adl.structure.common.ConnectorSignal; import org.obeonetwork.dsl.east_adl.structure.common.DesignDataType; import org.obeonetwork.dsl.east_adl.structure.common.ImplementationDataType; import org.obeonetwork.dsl.east_adl.structure.common.OperationCall; import org.obeonetwork.dsl.east_adl.structure.common.TypeAssociation; import org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.AnalysisFunction; import org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.FunctionalAnalysisArchitecture; import org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.FunctionalDevice; import org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.Functional_analysis_architecturePackage; import org.obeonetwork.dsl.east_adl.structure.functional_design_architecture.FunctionalDesignArchitecture; import org.obeonetwork.dsl.east_adl.structure.functional_design_architecture.Functional_design_architecturePackage; import org.obeonetwork.dsl.east_adl.structure.vehicle_feature_model.VehicleFeatureModel; import org.obeonetwork.dsl.east_adl.structure.vehicle_feature_model.Vehicle_feature_modelPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Functional Analysis Architecture</b></em>'. 
* <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getAnalysisFunctions <em>Analysis Functions</em>}</li> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getFunctionalDevices <em>Functional Devices</em>}</li> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getVehicleModel <em>Vehicle Model</em>}</li> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getDesignArchitecture <em>Design Architecture</em>}</li> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getDesignDataTypes <em>Design Data Types</em>}</li> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getConnectorSignals <em>Connector Signals</em>}</li> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getOperationCalls <em>Operation Calls</em>}</li> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getTypeAssociations <em>Type Associations</em>}</li> * <li>{@link org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.impl.FunctionalAnalysisArchitectureImpl#getImplementationDataTypes <em>Implementation Data Types</em>}</li> * </ul> * </p> * * @generated */ public class FunctionalAnalysisArchitectureImpl extends EASTADLArtifactImpl implements FunctionalAnalysisArchitecture { /** * The cached value of the '{@link #getAnalysisFunctions() <em>Analysis Functions</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getAnalysisFunctions() * @generated * @ordered */ protected EList<AnalysisFunction> analysisFunctions; /** * The cached value of the '{@link #getFunctionalDevices() <em>Functional Devices</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getFunctionalDevices() * @generated * @ordered */ protected EList<FunctionalDevice> functionalDevices; /** * The cached value of the '{@link #getVehicleModel() <em>Vehicle Model</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getVehicleModel() * @generated * @ordered */ protected VehicleFeatureModel vehicleModel; /** * The cached value of the '{@link #getDesignArchitecture() <em>Design Architecture</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getDesignArchitecture() * @generated * @ordered */ protected FunctionalDesignArchitecture designArchitecture; /** * The cached value of the '{@link #getDesignDataTypes() <em>Design Data Types</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getDesignDataTypes() * @generated * @ordered */ protected EList<DesignDataType> designDataTypes; /** * The cached value of the '{@link #getConnectorSignals() <em>Connector Signals</em>}' containment reference list. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getConnectorSignals() * @generated * @ordered */ protected EList<ConnectorSignal> connectorSignals; /** * The cached value of the '{@link #getOperationCalls() <em>Operation Calls</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getOperationCalls() * @generated * @ordered */ protected EList<OperationCall> operationCalls; /** * The cached value of the '{@link #getTypeAssociations() <em>Type Associations</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getTypeAssociations() * @generated * @ordered */ protected EList<TypeAssociation> typeAssociations; /** * The cached value of the '{@link #getImplementationDataTypes() <em>Implementation Data Types</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getImplementationDataTypes() * @generated * @ordered */ protected EList<ImplementationDataType> implementationDataTypes; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected FunctionalAnalysisArchitectureImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return Functional_analysis_architecturePackage.Literals.FUNCTIONAL_ANALYSIS_ARCHITECTURE; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<AnalysisFunction> getAnalysisFunctions() { if (analysisFunctions == null) { analysisFunctions = new EObjectContainmentEList<AnalysisFunction>(AnalysisFunction.class, this, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__ANALYSIS_FUNCTIONS); } return analysisFunctions; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<FunctionalDevice> getFunctionalDevices() { if (functionalDevices == null) { functionalDevices = new EObjectContainmentWithInverseEList<FunctionalDevice>(FunctionalDevice.class, this, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__FUNCTIONAL_DEVICES, Functional_analysis_architecturePackage.FUNCTIONAL_DEVICE__OWNING_ARTIFACT); } return functionalDevices; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public VehicleFeatureModel getVehicleModel() { if (vehicleModel != null && vehicleModel.eIsProxy()) { InternalEObject oldVehicleModel = (InternalEObject)vehicleModel; vehicleModel = (VehicleFeatureModel)eResolveProxy(oldVehicleModel); if (vehicleModel != oldVehicleModel) { if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.RESOLVE, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL, oldVehicleModel, vehicleModel)); } } return vehicleModel; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public VehicleFeatureModel basicGetVehicleModel() { return vehicleModel; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetVehicleModel(VehicleFeatureModel newVehicleModel, NotificationChain msgs) { VehicleFeatureModel oldVehicleModel = vehicleModel; vehicleModel = newVehicleModel; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL, oldVehicleModel, newVehicleModel); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc 
--> * @generated */ public void setVehicleModel(VehicleFeatureModel newVehicleModel) { if (newVehicleModel != vehicleModel) { NotificationChain msgs = null; if (vehicleModel != null) msgs = ((InternalEObject)vehicleModel).eInverseRemove(this, Vehicle_feature_modelPackage.VEHICLE_FEATURE_MODEL__ANALYSIS_ARCHITECTURE, VehicleFeatureModel.class, msgs); if (newVehicleModel != null) msgs = ((InternalEObject)newVehicleModel).eInverseAdd(this, Vehicle_feature_modelPackage.VEHICLE_FEATURE_MODEL__ANALYSIS_ARCHITECTURE, VehicleFeatureModel.class, msgs); msgs = basicSetVehicleModel(newVehicleModel, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL, newVehicleModel, newVehicleModel)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public FunctionalDesignArchitecture getDesignArchitecture() { if (designArchitecture != null && designArchitecture.eIsProxy()) { InternalEObject oldDesignArchitecture = (InternalEObject)designArchitecture; designArchitecture = (FunctionalDesignArchitecture)eResolveProxy(oldDesignArchitecture); if (designArchitecture != oldDesignArchitecture) { if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.RESOLVE, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE, oldDesignArchitecture, designArchitecture)); } } return designArchitecture; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public FunctionalDesignArchitecture basicGetDesignArchitecture() { return designArchitecture; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetDesignArchitecture(FunctionalDesignArchitecture newDesignArchitecture, NotificationChain msgs) { FunctionalDesignArchitecture oldDesignArchitecture = designArchitecture; designArchitecture = newDesignArchitecture; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE, oldDesignArchitecture, newDesignArchitecture); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setDesignArchitecture(FunctionalDesignArchitecture newDesignArchitecture) { if (newDesignArchitecture != designArchitecture) { NotificationChain msgs = null; if (designArchitecture != null) msgs = ((InternalEObject)designArchitecture).eInverseRemove(this, Functional_design_architecturePackage.FUNCTIONAL_DESIGN_ARCHITECTURE__ANALYSIS_ARCHITECTURE, FunctionalDesignArchitecture.class, msgs); if (newDesignArchitecture != null) msgs = ((InternalEObject)newDesignArchitecture).eInverseAdd(this, Functional_design_architecturePackage.FUNCTIONAL_DESIGN_ARCHITECTURE__ANALYSIS_ARCHITECTURE, FunctionalDesignArchitecture.class, msgs); msgs = basicSetDesignArchitecture(newDesignArchitecture, msgs);<|fim▁hole|> else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE, newDesignArchitecture, newDesignArchitecture)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<DesignDataType> getDesignDataTypes() { if (designDataTypes == null) { designDataTypes = new 
EObjectContainmentEList<DesignDataType>(DesignDataType.class, this, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_DATA_TYPES); } return designDataTypes; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<ConnectorSignal> getConnectorSignals() { if (connectorSignals == null) { connectorSignals = new EObjectContainmentEList<ConnectorSignal>(ConnectorSignal.class, this, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__CONNECTOR_SIGNALS); } return connectorSignals; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<OperationCall> getOperationCalls() { if (operationCalls == null) { operationCalls = new EObjectContainmentEList<OperationCall>(OperationCall.class, this, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__OPERATION_CALLS); } return operationCalls; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<TypeAssociation> getTypeAssociations() { if (typeAssociations == null) { typeAssociations = new EObjectContainmentEList<TypeAssociation>(TypeAssociation.class, this, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__TYPE_ASSOCIATIONS); } return typeAssociations; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<ImplementationDataType> getImplementationDataTypes() { if (implementationDataTypes == null) { implementationDataTypes = new EObjectContainmentEList<ImplementationDataType>(ImplementationDataType.class, this, Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__IMPLEMENTATION_DATA_TYPES); } return implementationDataTypes; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @SuppressWarnings("unchecked") @Override public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__FUNCTIONAL_DEVICES: return ((InternalEList<InternalEObject>)(InternalEList<?>)getFunctionalDevices()).basicAdd(otherEnd, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL: if (vehicleModel != null) msgs = ((InternalEObject)vehicleModel).eInverseRemove(this, Vehicle_feature_modelPackage.VEHICLE_FEATURE_MODEL__ANALYSIS_ARCHITECTURE, VehicleFeatureModel.class, msgs); return basicSetVehicleModel((VehicleFeatureModel)otherEnd, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE: if (designArchitecture != null) msgs = ((InternalEObject)designArchitecture).eInverseRemove(this, Functional_design_architecturePackage.FUNCTIONAL_DESIGN_ARCHITECTURE__ANALYSIS_ARCHITECTURE, FunctionalDesignArchitecture.class, msgs); return basicSetDesignArchitecture((FunctionalDesignArchitecture)otherEnd, msgs); } return super.eInverseAdd(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__ANALYSIS_FUNCTIONS: return ((InternalEList<?>)getAnalysisFunctions()).basicRemove(otherEnd, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__FUNCTIONAL_DEVICES: return ((InternalEList<?>)getFunctionalDevices()).basicRemove(otherEnd, msgs); 
case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL: return basicSetVehicleModel(null, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE: return basicSetDesignArchitecture(null, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_DATA_TYPES: return ((InternalEList<?>)getDesignDataTypes()).basicRemove(otherEnd, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__CONNECTOR_SIGNALS: return ((InternalEList<?>)getConnectorSignals()).basicRemove(otherEnd, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__OPERATION_CALLS: return ((InternalEList<?>)getOperationCalls()).basicRemove(otherEnd, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__TYPE_ASSOCIATIONS: return ((InternalEList<?>)getTypeAssociations()).basicRemove(otherEnd, msgs); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__IMPLEMENTATION_DATA_TYPES: return ((InternalEList<?>)getImplementationDataTypes()).basicRemove(otherEnd, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__ANALYSIS_FUNCTIONS: return getAnalysisFunctions(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__FUNCTIONAL_DEVICES: return getFunctionalDevices(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL: if (resolve) return getVehicleModel(); return basicGetVehicleModel(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE: if (resolve) return getDesignArchitecture(); return basicGetDesignArchitecture(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_DATA_TYPES: return getDesignDataTypes(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__CONNECTOR_SIGNALS: return getConnectorSignals(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__OPERATION_CALLS: return getOperationCalls(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__TYPE_ASSOCIATIONS: return getTypeAssociations(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__IMPLEMENTATION_DATA_TYPES: return getImplementationDataTypes(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @SuppressWarnings("unchecked") @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__ANALYSIS_FUNCTIONS: getAnalysisFunctions().clear(); getAnalysisFunctions().addAll((Collection<? extends AnalysisFunction>)newValue); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__FUNCTIONAL_DEVICES: getFunctionalDevices().clear(); getFunctionalDevices().addAll((Collection<? 
extends FunctionalDevice>)newValue); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL: setVehicleModel((VehicleFeatureModel)newValue); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE: setDesignArchitecture((FunctionalDesignArchitecture)newValue); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_DATA_TYPES: getDesignDataTypes().clear(); getDesignDataTypes().addAll((Collection<? extends DesignDataType>)newValue); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__CONNECTOR_SIGNALS: getConnectorSignals().clear(); getConnectorSignals().addAll((Collection<? extends ConnectorSignal>)newValue); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__OPERATION_CALLS: getOperationCalls().clear(); getOperationCalls().addAll((Collection<? extends OperationCall>)newValue); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__TYPE_ASSOCIATIONS: getTypeAssociations().clear(); getTypeAssociations().addAll((Collection<? extends TypeAssociation>)newValue); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__IMPLEMENTATION_DATA_TYPES: getImplementationDataTypes().clear(); getImplementationDataTypes().addAll((Collection<? extends ImplementationDataType>)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__ANALYSIS_FUNCTIONS: getAnalysisFunctions().clear(); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__FUNCTIONAL_DEVICES: getFunctionalDevices().clear(); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL: setVehicleModel((VehicleFeatureModel)null); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE: setDesignArchitecture((FunctionalDesignArchitecture)null); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_DATA_TYPES: getDesignDataTypes().clear(); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__CONNECTOR_SIGNALS: getConnectorSignals().clear(); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__OPERATION_CALLS: getOperationCalls().clear(); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__TYPE_ASSOCIATIONS: getTypeAssociations().clear(); return; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__IMPLEMENTATION_DATA_TYPES: getImplementationDataTypes().clear(); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__ANALYSIS_FUNCTIONS: return analysisFunctions != null && !analysisFunctions.isEmpty(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__FUNCTIONAL_DEVICES: return functionalDevices != null && !functionalDevices.isEmpty(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__VEHICLE_MODEL: return vehicleModel != null; case 
Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_ARCHITECTURE: return designArchitecture != null; case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__DESIGN_DATA_TYPES: return designDataTypes != null && !designDataTypes.isEmpty(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__CONNECTOR_SIGNALS: return connectorSignals != null && !connectorSignals.isEmpty(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__OPERATION_CALLS: return operationCalls != null && !operationCalls.isEmpty(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__TYPE_ASSOCIATIONS: return typeAssociations != null && !typeAssociations.isEmpty(); case Functional_analysis_architecturePackage.FUNCTIONAL_ANALYSIS_ARCHITECTURE__IMPLEMENTATION_DATA_TYPES: return implementationDataTypes != null && !implementationDataTypes.isEmpty(); } return super.eIsSet(featureID); } } //FunctionalAnalysisArchitectureImpl<|fim▁end|>
if (msgs != null) msgs.dispatch(); }
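What the generated eInverseAdd/basicSet machinery above buys callers: setting one end of a bidirectional reference silently rewires the opposite end. A sketch follows; the factory classes and the getAnalysisArchitecture() accessor follow standard EMF naming conventions but live outside this file, so they are assumptions here.

import org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.FunctionalAnalysisArchitecture;
import org.obeonetwork.dsl.east_adl.structure.functional_analysis_architecture.Functional_analysis_architectureFactory;
import org.obeonetwork.dsl.east_adl.structure.vehicle_feature_model.VehicleFeatureModel;
import org.obeonetwork.dsl.east_adl.structure.vehicle_feature_model.Vehicle_feature_modelFactory;

public class InverseReferenceDemo {
    public static void main(String[] args) {
        FunctionalAnalysisArchitecture faa = Functional_analysis_architectureFactory
                .eINSTANCE.createFunctionalAnalysisArchitecture();
        VehicleFeatureModel vfm = Vehicle_feature_modelFactory
                .eINSTANCE.createVehicleFeatureModel();

        // setVehicleModel runs basicSetVehicleModel/eInverseAdd and wires the
        // opposite reference without any bookkeeping on the caller's side.
        faa.setVehicleModel(vfm);
        System.out.println(vfm.getAnalysisArchitecture() == faa); // true

        faa.setVehicleModel(null);
        System.out.println(vfm.getAnalysisArchitecture());        // null
    }
}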
<|file_name|>speaker.py<|end_file_name|><|fim▁begin|>import pygame import logging from tools.action import Action class Speaker(Action): def __init__(self, id, params): super(Speaker, self).__init__(id, params) try: self.path_to_audio = params["path_to_audio"] self.repetitions = int(params["repetitions"]) except ValueError as ve: # if repetitions can't be parsed as int logging.error("Speaker: Wasn't able to initialize the device, please check your configuration: %s" % ve) self.corrupted = True return except KeyError as ke: # if config parameters are missing in file logging.error("Speaker: Wasn't able to initialize the device, it seems there is a config parameter missing: %s" % ke) self.corrupted = True return logging.debug("Speaker: Audio device initialized") def play_audio(self): logging.debug("Speaker: Trying to play audio") pygame.mixer.init() try: pygame.mixer.music.load(self.path_to_audio) except Exception as e: # audio file doesn't exist or is not playable logging.error("Speaker: Wasn't able to load audio file: %s" % e) pygame.mixer.quit() return pygame.mixer.music.set_volume(1) for i in range(0, self.repetitions): pygame.mixer.music.rewind() pygame.mixer.music.play() while pygame.mixer.music.get_busy(): continue<|fim▁hole|> logging.debug("Speaker: Finished playing audio") def execute(self): if not self.corrupted: self.play_audio() else: logging.error("Speaker: Wasn't able to play sound because of an initialization error") def cleanup(self): logging.debug("Speaker: No cleanup necessary at the moment")<|fim▁end|>
pygame.mixer.quit()
<|file_name|>web_form.js<|end_file_name|><|fim▁begin|>frappe.provide("frappe.ui"); frappe.provide("frappe.web_form"); import EventEmitterMixin from '../../frappe/event_emitter'; export default class WebForm extends frappe.ui.FieldGroup { constructor(opts) { super(); Object.assign(this, opts); frappe.web_form = this; frappe.web_form.events = {}; Object.assign(frappe.web_form.events, EventEmitterMixin); } prepare(web_form_doc, doc) { Object.assign(this, web_form_doc); this.fields = web_form_doc.web_form_fields; this.doc = doc; } make() { super.make(); this.set_field_values(); if (this.introduction_text) this.set_form_description(this.introduction_text); if (this.allow_print && !this.is_new) this.setup_print_button(); if (this.allow_delete && !this.is_new) this.setup_delete_button(); if (this.is_new) this.setup_cancel_button(); this.setup_primary_action(); $(".link-btn").remove(); // webform client script frappe.init_client_script && frappe.init_client_script(); frappe.web_form.events.trigger('after_load'); this.after_load && this.after_load(); } on(fieldname, handler) { let field = this.fields_dict[fieldname]; field.df.change = () => { handler(field, field.value); }; } set_field_values() { if (this.doc.name) this.set_values(this.doc); else return; } set_default_values() { let values = frappe.utils.get_query_params(); delete values.new; this.set_values(values); } set_form_description(intro) { let intro_wrapper = document.getElementById('introduction'); intro_wrapper.innerHTML = intro; } add_button(name, type, action, wrapper_class=".web-form-actions") { const button = document.createElement("button"); button.classList.add("btn", "btn-" + type, "btn-sm", "ml-2"); button.innerHTML = name; button.onclick = action; document.querySelector(wrapper_class).appendChild(button); } add_button_to_footer(name, type, action) { this.add_button(name, type, action, '.web-form-footer'); } add_button_to_header(name, type, action) { this.add_button(name, type, action, '.web-form-actions'); } setup_primary_action() { this.add_button_to_header(this.button_label || "Save", "primary", () => this.save() ); this.add_button_to_footer(this.button_label || "Save", "primary", () => this.save() ); } setup_cancel_button() { this.add_button_to_header(__("Cancel"), "light", () => this.cancel()); } setup_delete_button() { this.add_button_to_header( '<i class="fa fa-trash" aria-hidden="true"></i>', "light", () => this.delete() ); } setup_print_button() { this.add_button_to_header( '<i class="fa fa-print" aria-hidden="true"></i>',<|fim▁hole|> "light", () => this.print() ); } save() { if (this.validate && !this.validate()) { frappe.throw(__("Couldn't save, please check the data you have entered"), __("Validation Error")); } // validation hack: get_values will check for missing data let doc_values = super.get_values(this.allow_incomplete); if (!doc_values) return; if (window.saving) return; let for_payment = Boolean(this.accept_payment && !this.doc.paid); Object.assign(this.doc, doc_values); this.doc.doctype = this.doc_type; this.doc.web_form_name = this.name; // Save window.saving = true; frappe.form_dirty = false; frappe.call({ type: "POST", method: "frappe.website.doctype.web_form.web_form.accept", args: { data: this.doc, web_form: this.name, docname: this.doc.name, for_payment }, callback: response => { // Check for any exception in response if (!response.exc) { // Success this.handle_success(response.message); frappe.web_form.events.trigger('after_save'); this.after_save && this.after_save(); } }, always: function() { 
window.saving = false; } }); return true; } delete() { frappe.call({ type: "POST", method: "frappe.website.doctype.web_form.web_form.delete", args: { web_form_name: this.name, docname: this.doc.name } }); } print() { window.open(`/printview? doctype=${this.doc_type} &name=${this.doc.name} &format=${this.print_format || "Standard"}`, '_blank'); } cancel() { window.location.href = window.location.pathname; } handle_success(data) { if (this.accept_payment && !this.doc.paid) { window.location.href = data; } const success_dialog = new frappe.ui.Dialog({ title: __("Saved Successfully"), secondary_action: () => { if (this.success_url) { window.location.href = this.success_url; } else if(this.login_required) { window.location.href = window.location.pathname + "?name=" + data.name; } } }); success_dialog.show(); const success_message = this.success_message || __("Your information has been submitted"); success_dialog.set_message(success_message); } }<|fim▁end|>
<|file_name|>unique-autoderef-field.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //<|fim▁hole|>// except according to those terms. // run-pass #![feature(box_syntax)] struct J { j: isize } pub fn main() { let i: Box<_> = box J { j: 100 }; assert_eq!(i.j, 100); }<|fim▁end|>
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 Ted Mielczarek. See the COPYRIGHT
// file at the top-level directory of this distribution.

use range_map::{Range, RangeMap};
use std::cmp::Ordering;
use std::collections::HashMap;

/// A publicly visible linker symbol.
#[derive(Debug, Eq, PartialEq)]
pub struct PublicSymbol {
    /// The symbol's address relative to the module's load address.
    pub address: u64,
    /// The size of parameters passed to the function.
    pub parameter_size: u32,
    /// The name of the symbol.
    pub name: String,
}

impl Ord for PublicSymbol {
    fn cmp(&self, other: &PublicSymbol) -> Ordering {
        let o = self.address.cmp(&other.address);
        if o != Ordering::Equal {
            o
        } else {
            // Fall back to sorting by name if addresses are equal.
            let nameo = self.name.cmp(&other.name);
            if nameo != Ordering::Equal {
                nameo
            } else {
                // Compare parameter size just for sanity.
                self.parameter_size.cmp(&other.parameter_size)
            }
        }
    }
}

impl PartialOrd for PublicSymbol {
    fn partial_cmp(&self, other: &PublicSymbol) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

/// A mapping from machine code bytes to source line and file.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SourceLine {
    /// The start address relative to the module's load address.
    pub address: u64,
    /// The size of this range of instructions in bytes.
    pub size: u32,
    /// The source file name that generated this machine code.
    ///
    /// This is an index into `SymbolFile::files`.
    pub file: u32,
    /// The line number in `file` that generated this machine code.
    pub line: u32,
}

/// A source-language function.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Function {
    /// The function's start address relative to the module's load address.
    pub address: u64,
    /// The size of the function in bytes.
    pub size: u32,
    /// The size of parameters passed to the function.
    pub parameter_size: u32,
    /// The name of the function as declared in the source.
    pub name: String,
    /// Source line information for this function.
    pub lines: RangeMap<u64, SourceLine>,
}

impl Function {
    pub fn memory_range(&self) -> Option<Range<u64>> {
        if self.size == 0 {
            return None;
        }
        Some(Range::new(
            self.address,
            self.address.checked_add(self.size as u64)? - 1,
        ))
    }
}

/// Extra metadata that can be safely ignored, but may contain useful facts.
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum Info {
    /// The URL this file was downloaded from. This is added to symbol files
    /// by HttpSymbolSupplier when it stores them in its cache, so that we
    /// can populate that info even on a cache hit.
    Url(String),
    /// An info line we either don't know about or don't care about.
    Unknown,
}

/// DWARF CFI rules for recovering registers at a specific address.
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct CfiRules {
    /// The address in question.
    pub address: u64,
    /// Postfix expressions to evaluate to recover register values.
    pub rules: String,
}

/// Information used for unwinding stack frames using DWARF CFI.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StackInfoCfi {
    /// The initial rules for this address range.
    pub init: CfiRules,
    /// The size of this entire address range.
    pub size: u32,
    /// Additional rules to use at specified addresses.
    pub add_rules: Vec<CfiRules>,
}

impl StackInfoCfi {
    pub fn memory_range(&self) -> Option<Range<u64>> {
        if self.size == 0 {
            return None;
        }
        Some(Range::new(
            self.init.address,<|fim▁hole|>
        ))
    }
}

/// Specific details about whether the frame uses a base pointer or has a program string to
/// evaluate.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum WinFrameType {
    /// This frame uses FPO-style data.
    Fpo(StackInfoWin),
    /// This frame uses new-style frame data, has a program string.
    FrameData(StackInfoWin),
    /// Some other type of frame.
    Unhandled,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum WinStackThing {
    ProgramString(String),
    AllocatesBasePointer(bool),
}

/// Information used for unwinding stack frames using Windows frame info.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StackInfoWin {
    /// The address in question.
    pub address: u64,
    /// The size of the address range covered.
    pub size: u32,
    /// The size of the function's prologue.
    pub prologue_size: u32,
    /// The size of the function's epilogue.
    pub epilogue_size: u32,
    /// The size of arguments passed to this function.
    pub parameter_size: u32,
    /// The number of bytes in the stack frame for callee-saves registers.
    pub saved_register_size: u32,
    /// The number of bytes in the stack frame for local variables.
    pub local_size: u32,
    /// The maximum number of bytes pushed onto the stack by this frame.
    pub max_stack_size: u32,
    /// A program string or boolean regarding a base pointer.
    pub program_string_or_base_pointer: WinStackThing,
}

impl StackInfoWin {
    pub fn memory_range(&self) -> Option<Range<u64>> {
        if self.size == 0 {
            return None;
        }
        Some(Range::new(
            self.address,
            self.address.checked_add(self.size as u64)? - 1,
        ))
    }
}

/// A parsed .sym file containing debug symbols.
#[derive(Debug, PartialEq)]
pub struct SymbolFile {
    /// The set of source files involved in compilation.
    pub files: HashMap<u32, String>,
    /// Publicly visible symbols.
    pub publics: Vec<PublicSymbol>,
    /// Functions.
    pub functions: RangeMap<u64, Function>,
    /// DWARF CFI unwind information.
    pub cfi_stack_info: RangeMap<u64, StackInfoCfi>,
    /// Windows unwind information (frame data).
    pub win_stack_framedata_info: RangeMap<u64, StackInfoWin>,
    /// Windows unwind information (FPO data).
    pub win_stack_fpo_info: RangeMap<u64, StackInfoWin>,

    // Statistics which are strictly best-effort. Generally this
    // means we might undercount in situations where we forgot to
    // log an event.

    /// If the symbol file was loaded from a URL, this is the URL.
    pub url: Option<String>,

    /// The number of times the parser found that the symbol file was
    /// strictly ambiguous but simple heuristics repaired it. (e.g.
    /// two STACK WIN entries overlapped, but the second was a suffix of
    /// the first, so we just truncated the first.)
    ///
    /// Ideally dump_syms would never output this kind of thing, but it's
    /// tolerable.
    pub ambiguities_repaired: u64,

    /// The number of times the parser found that the symbol file was
    /// ambiguous and just randomly picked one of the options to make
    /// progress.
    ///
    /// e.g. two STACK WIN entries with identical ranges but
    /// different values, so one was discarded arbitrarily.
    pub ambiguities_discarded: u64,

    /// The number of times the parser found that a section of the file
    /// (generally a line) was corrupt and discarded it.
    ///
    /// e.g. a STACK WIN entry where the `type` and `has_program` fields
    /// have inconsistent values.
    pub corruptions_discarded: u64,

    /// The number of times the cfi evaluator failed out in a way that
    /// implies the cfi entry is fundamentally corrupt.
    ///
    /// This isn't detected during parsing for two reasons:
    ///
    /// * We don't parse cfi program strings until we are requested to
    ///   execute them (there are ~millions of program strings which will
    ///   never need to be parsed, so eagerly parsing them would be
    ///   horribly expensive and pointless for anything but debug stats.)
    ///
    /// * A program string may technically parse but still be impossible
    ///   to fully evaluate. For instance, it might try to pop values from
    ///   its internal stack when there are none left.
    ///
    /// This number may be inflated if a corrupt cfi entry occurs in multiple
    /// frames, as each attempted eval will be counted.
    ///
    /// This number does not include cfi evaluations that failed in ways that
    /// may be a result of incorrect input memory/registers (e.g. failing
    /// to evaluate a "dereference pointer" instruction because the pointer
    /// was not mapped memory). In these situations the cfi entry *may*
    /// still be correct.
    pub cfi_eval_corruptions: u64,
}<|fim▁end|>
self.init.address.checked_add(self.size as u64)? - 1,
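The completion fills in the overflow-checked upper bound: every memory_range() helper in this sample follows the same contract, returning None for zero-sized entries and otherwise an inclusive [start, start + size - 1] range, with checked_add guarding the addition against u64 overflow. A minimal Python sketch of that contract, illustrative only and not part of any dataset row (the function name is hypothetical):

U64_MAX = 0xFFFFFFFFFFFFFFFF

def memory_range(address, size):
    # Zero-sized entries have no meaningful range, mirroring the Rust helpers.
    if size == 0:
        return None
    # Rust's checked_add(size)? bails out on u64 overflow; emulate the bound check.
    if address + size > U64_MAX:
        return None
    # Inclusive upper bound: a range of size 1 is [address, address].
    return (address, address + size - 1)

assert memory_range(0x1000, 0) is None
assert memory_range(0x1000, 0x20) == (0x1000, 0x101F)

The inclusive bound is why the subtraction happens after the checked addition in the Rust original: computing address + size first detects the overflow, then backing off by one lands on the last byte of the range.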
<|file_name|>test_product.cpp<|end_file_name|><|fim▁begin|>//
// Created by Aman LaChapelle on 5/26/17.
//
// pytorch_inference
// Copyright (c) 2017 Aman LaChapelle
// Full license at pytorch_inference/LICENSE.txt
//

#include "../include/layers.hpp"
#include "utils.hpp"

int main(){
  std::vector<pytorch::tensor> tests = test_setup({1, 1, 1}, {2, 2, 2},
                                                  {45, 45, 45}, {50, 50, 50},
                                                  {1}, {2}, {45},<|fim▁hole|>
                                                  {50},
                                                  {"test_prod1.dat", "test_prod2.dat", "test_prod3.dat"},
                                                  "test_prod");
  // tests has {input1, input2, input3, pytorch_output}

  pytorch::Product p(pytorch::k, 3);

  af::timer::start();
  pytorch::tensor prod;
  for (int j = 49; j >= 0; j--){
    prod = p({tests[0], tests[1], tests[2]})[0];
    prod.eval();
  }
  af::sync();
  std::cout << "arrayfire forward took (s): " << af::timer::stop()/50 << "(avg)" << std::endl;

  assert(almost_equal(prod, tests[3]));
}<|fim▁end|>
<|file_name|>hardware.py<|end_file_name|><|fim▁begin|># # Copyright (c) 1999--2015 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. # # This thing gets the hardware configuraion out of a system """Used to read hardware info from kudzu, /proc, etc""" from socket import gethostname, getaddrinfo, AF_INET, AF_INET6 import socket import re import os import sys from up2date_client import config from up2date_client import rhnserver from rhn.i18n import ustr try: long except NameError: # long is not defined in python3 long = int try: import ethtool ethtool_present = True except ImportError: sys.stderr.write("Warning: information about network interfaces could not be retrieved on this platform.\n") ethtool_present = False import gettext t = gettext.translation('rhn-client-tools', fallback=True) # Python 3 translations don't have a ugettext method if not hasattr(t, 'ugettext'): t.ugettext = t.gettext _ = t.ugettext import dbus import dmidecode from up2date_client import up2dateLog try: # F13 and EL6 from up2date_client.hardware_gudev import get_devices, get_computer_info using_gudev = 1 except ImportError: from up2date_client.hardware_hal import check_hal_dbus_status, get_hal_computer, read_hal using_gudev = 0 # Some systems don't have the _locale module installed try: import locale except ImportError: locale = None sys.path.append("/usr/share/rhsm") try: from subscription_manager.hwprobe import Hardware as SubManHardware subscription_manager_available = True except ImportError: subscription_manager_available = False # this does not change, we can cache it _dmi_data = None _dmi_not_available = 0 def dmi_warnings(): if not hasattr(dmidecode, 'get_warnings'): return None return dmidecode.get_warnings() dmi_warn = dmi_warnings() if dmi_warn: dmidecode.clear_warnings() log = up2dateLog.initLog() log.log_debug("Warnings collected during dmidecode import: %s" % dmi_warn) def _initialize_dmi_data(): """ Initialize _dmi_data unless it already exist and returns it """ global _dmi_data, _dmi_not_available if _dmi_data is None: if _dmi_not_available: # do not try to initialize it again and again if not available return None else : dmixml = dmidecode.dmidecodeXML() dmixml.SetResultType(dmidecode.DMIXML_DOC) # Get all the DMI data and prepare a XPath context try: data = dmixml.QuerySection('all') dmi_warn = dmi_warnings() if dmi_warn: dmidecode.clear_warnings() log = up2dateLog.initLog() log.log_debug("dmidecode warnings: " % dmi_warn) except: # DMI decode FAIL, this can happend e.g in PV guest _dmi_not_available = 1 dmi_warn = dmi_warnings() if dmi_warn: dmidecode.clear_warnings() return None _dmi_data = data.xpathNewContext() return _dmi_data def get_dmi_data(path): """ Fetch DMI data from given section using given path. If data could not be retrieved, returns empty string. General method and should not be used outside of this module. 
""" dmi_data = _initialize_dmi_data() if dmi_data is None: return '' data = dmi_data.xpathEval(path) if data != []: return data[0].content else: # The path do not exist return '' def dmi_vendor(): """ Return Vendor from dmidecode bios information. If this value could not be fetch, returns empty string. """ return get_dmi_data('/dmidecode/BIOSinfo/Vendor') def dmi_system_uuid(): """ Return UUID from dmidecode system information. If this value could not be fetch, returns empty string. """ # if guest was created manualy it can have empty UUID, in this # case dmidecode set attribute unavailable to 1 uuid = get_dmi_data("/dmidecode/SystemInfo/SystemUUID[not(@unavailable='1')]") if not uuid: uuid = '' return uuid def read_installinfo(): if not os.access("/etc/sysconfig/installinfo", os.R_OK): return {} installinfo = open("/etc/sysconfig/installinfo", "r").readlines() installdict = {} installdict['class'] = "INSTALLINFO" for info in installinfo: if not len(info): continue vals = info.split('=') if len(vals) <= 1: continue strippedstring = vals[0].strip() vals[0] = strippedstring installdict[vals[0]] = ''.join(vals[1:]).strip() return installdict def cpu_count(): """ returns number of CPU in system Beware that it can be different from number of active CPU (e.g. on s390x architecture """ try: cpu_dir = os.listdir('/sys/devices/system/cpu/') except OSError: cpu_dir = [] re_cpu = re.compile(r"^cpu[0-9]+$") return len([i for i in cpu_dir if re_cpu.match(i)]) # get the number of sockets available on this machine def __get_number_sockets(): try: if subscription_manager_available: return SubManHardware().getCpuInfo()['cpu.cpu_socket(s)'] except: pass # something went wrong, let's figure it out ourselves number_sockets = 0 # Try lscpu command if available if os.access("/usr/bin/lscpu", os.X_OK): try: lines = os.popen("/usr/bin/lscpu -p").readlines() max_socket_index = -1 for line in lines: if line.startswith('#'): continue # get the socket index from the output socket_index = int(line.split(',')[2]) if socket_index > max_socket_index: max_socket_index = socket_index if max_socket_index > -1: return 1 + max_socket_index except: pass # Next try parsing /proc/cpuinfo if os.access("/proc/cpuinfo", os.R_OK): try: lines = open("/proc/cpuinfo", 'r').readlines() socket_ids = set() for line in lines: if 'physical id' in line: socket_index = int(line.split(':')[1].strip()) socket_ids.add(socket_index) if len(socket_ids) > 0: return len(socket_ids) except: pass # Next try dmidecode if os.access("/usr/sbin/dmidecode", os.X_OK): try: lines = os.popen("/usr/sbin/dmidecode -t processor").readlines() count = 0 for line in lines: if 'Processor Information' in line: count += 1 if count > 0: return count except: pass return None # This has got to be one of the ugliest fucntions alive def read_cpuinfo(): def get_entry(a, entry): e = entry.lower() if not e in a: return "" return a[e] # read cpu list and return number of cpus and list as dictionary def get_cpulist_as_dict(cpulist): count = 0 tmpdict = {} for cpu in cpulist.split("\n\n"): if not len(cpu): continue count = count + 1 if count > 1: break # no need to parse rest for cpu_attr in cpu.split("\n"): if not len(cpu_attr): continue vals = cpu_attr.split(":") if len(vals) != 2: # XXX: make at least some effort to recover this data... 
                    continue
                name, value = vals[0].strip(), vals[1].strip()
                tmpdict[name.lower()] = value
        return tmpdict

    if not os.access("/proc/cpuinfo", os.R_OK):
        return {}

    # Okay, the kernel likes to give us the information we need in the
    # standard "C" locale.
    if locale:
        # not really needed if you don't plan on using atof()
        locale.setlocale(locale.LC_NUMERIC, "C")

    cpulist = open("/proc/cpuinfo", "r").read()
    uname = os.uname()[4].lower()
    count = cpu_count()

    # This thing should return a hwdict that has the following
    # members:
    #
    # class, desc (required to identify the hardware device)
    # count, type, model, model_number, model_ver, model_rev
    # bogomips, platform, speed, cache
    hwdict = { 'class': "CPU",
               "desc" : "Processor",
             }

    if uname[0] == "i" and uname[-2:] == "86" or (uname == "x86_64"):
        # IA32 compatible enough
        tmpdict = get_cpulist_as_dict(cpulist)

        if uname == "x86_64":
            hwdict['platform'] = 'x86_64'
        else:
            hwdict['platform'] = "i386"

        hwdict['count'] = count
        hwdict['type'] = get_entry(tmpdict, 'vendor_id')
        hwdict['model'] = get_entry(tmpdict, 'model name')
        hwdict['model_number'] = get_entry(tmpdict, 'cpu family')
        hwdict['model_ver'] = get_entry(tmpdict, 'model')
        hwdict['model_rev'] = get_entry(tmpdict, 'stepping')
        hwdict['cache'] = get_entry(tmpdict, 'cache size')
        hwdict['bogomips'] = get_entry(tmpdict, 'bogomips')
        hwdict['other'] = get_entry(tmpdict, 'flags')
        mhz_speed = get_entry(tmpdict, 'cpu mhz')
        if mhz_speed == "":
            # damn, some machines don't report this
            mhz_speed = "-1"
        try:
            hwdict['speed'] = int(round(float(mhz_speed)) - 1)
        except ValueError:
            hwdict['speed'] = -1

    elif uname in ["alpha", "alphaev6"]:
        # Treat it as an Alpha
        tmpdict = get_cpulist_as_dict(cpulist)

        hwdict['platform'] = "alpha"
        hwdict['count'] = get_entry(tmpdict, 'cpus detected')
        hwdict['type'] = get_entry(tmpdict, 'cpu')
        hwdict['model'] = get_entry(tmpdict, 'cpu model')
        hwdict['model_number'] = get_entry(tmpdict, 'cpu variation')
        hwdict['model_version'] = "%s/%s" % (get_entry(tmpdict, 'system type'),
                                             get_entry(tmpdict, 'system variation'))
        hwdict['model_rev'] = get_entry(tmpdict, 'cpu revision')
        hwdict['cache'] = ""  # pity the kernel doesn't tell us this.
        hwdict['bogomips'] = get_entry(tmpdict, 'bogomips')
        hwdict['other'] = get_entry(tmpdict, 'platform string')
        hz_speed = get_entry(tmpdict, 'cycle frequency [Hz]')
        # some funky alphas actually report in the form "462375000 est."
        hz_speed = hz_speed.split()
        try:
            hwdict['speed'] = int(round(float(hz_speed[0]))) / 1000000
        except ValueError:
            hwdict['speed'] = -1

    elif uname in ["ia64"]:
        tmpdict = get_cpulist_as_dict(cpulist)

        hwdict['platform'] = uname
        hwdict['count'] = count
        hwdict['type'] = get_entry(tmpdict, 'vendor')
        hwdict['model'] = get_entry(tmpdict, 'family')
        hwdict['model_ver'] = get_entry(tmpdict, 'archrev')
        hwdict['model_rev'] = get_entry(tmpdict, 'revision')
        hwdict['bogomips'] = get_entry(tmpdict, 'bogomips')
        mhz_speed = tmpdict['cpu mhz']
        try:
            hwdict['speed'] = int(round(float(mhz_speed)) - 1)
        except ValueError:
            hwdict['speed'] = -1
        hwdict['other'] = get_entry(tmpdict, 'features')

    elif uname in ['ppc64']:
        tmpdict = get_cpulist_as_dict(cpulist)

        hwdict['platform'] = uname
        hwdict['count'] = count
        hwdict['model'] = get_entry(tmpdict, "cpu")
        hwdict['model_ver'] = get_entry(tmpdict, 'revision')
        hwdict['bogomips'] = get_entry(tmpdict, 'bogomips')
        hwdict['type'] = get_entry(tmpdict, 'machine')
        # strings are suffixed with "mhz"
        mhz_speed = get_entry(tmpdict, 'clock')[:-3]
        try:
            hwdict['speed'] = int(round(float(mhz_speed)) - 1)
        except ValueError:
            hwdict['speed'] = -1

    elif uname in ['s390', 's390x']:
        tmpdict = {}
        for cpu in cpulist.split("\n"):
            vals = cpu.split(": ")
            if len(vals) != 2:
                continue
            tmpdict[vals[0].strip()] = vals[1].strip()

<|fim▁hole|>
        hwdict['type'] = get_entry(tmpdict, 'vendor_id')
        hwdict['model'] = uname
        hwdict['count'] = count
        hwdict['bogomips'] = get_entry(tmpdict, 'bogomips per cpu')
        hwdict['model_number'] = ""
        hwdict['model_ver'] = ""
        hwdict['model_rev'] = ""
        hwdict['cache'] = ""
        hwdict['other'] = get_entry(tmpdict, 'features')
        hwdict['speed'] = 0

    else:
        # XXX: expand me. Be nice to others
        hwdict['platform'] = uname
        hwdict['count'] = count
        hwdict['type'] = uname
        hwdict['model'] = uname
        hwdict['model_number'] = ""
        hwdict['model_ver'] = ""
        hwdict['model_rev'] = ""
        hwdict['cache'] = ""
        hwdict['bogomips'] = ""
        hwdict['other'] = ""
        hwdict['speed'] = 0

    # make sure we get the right number here
    if not hwdict["count"]:
        hwdict["count"] = 1
    else:
        try:
            hwdict["count"] = int(hwdict["count"])
        except:
            hwdict["count"] = 1
        else:
            if hwdict["count"] == 0:
                # we have at least one
                hwdict["count"] = 1

    # Network communication doesn't really belong in here. Sadly though
    # this is the only single place we can put this check. If it's not
    # here then it would need to be in five or six other places, which
    # is not good from a DRY and quality-assurance perspective.
    s = rhnserver.RhnServer()
    if s.capabilities.hasCapability('cpu_sockets'):
        # If we know it add in the number of sockets
        number_sockets = __get_number_sockets()
        if number_sockets:
            hwdict['socket_count'] = number_sockets

    # This whole thing hurts a lot.
    return hwdict

def read_memory():
    un = os.uname()
    kernel = un[2]
    if kernel[:3] >= "2.6":
        return read_memory_2_6()
    if kernel[:3] == "2.4":
        return read_memory_2_4()

def read_memory_2_4():
    if not os.access("/proc/meminfo", os.R_OK):
        return {}

    meminfo = open("/proc/meminfo", "r").read()
    lines = meminfo.split("\n")
    curline = lines[1]
    memlist = curline.split()
    memdict = {}
    memdict['class'] = "MEMORY"
    megs = int(long(memlist[1])/(1024*1024))
    if megs < 32:
        megs = megs + (4 - (megs % 4))
    else:
        megs = megs + (16 - (megs % 16))
    memdict['ram'] = str(megs)
    curline = lines[2]
    memlist = curline.split()
    # otherwise, it breaks on > ~4gigs of swap
    megs = int(long(memlist[1])/(1024*1024))
    memdict['swap'] = str(megs)
    return memdict

def read_memory_2_6():
    if not os.access("/proc/meminfo", os.R_OK):
        return {}
    meminfo = open("/proc/meminfo", "r").read()
    lines = meminfo.split("\n")
    meminfo_dict = {}
    for line in lines:
        blobs = line.split(":", 1)
        key = blobs[0]
        if len(blobs) == 1:
            continue
        value = blobs[1].strip()
        meminfo_dict[key] = value

    memdict = {}
    memdict["class"] = "MEMORY"

    total_str = meminfo_dict['MemTotal']
    blips = total_str.split(" ")
    total_k = long(blips[0])
    megs = long(total_k/(1024))

    swap_str = meminfo_dict['SwapTotal']
    blips = swap_str.split(' ')
    swap_k = long(blips[0])
    swap_megs = long(swap_k/(1024))

    memdict['ram'] = str(megs)
    memdict['swap'] = str(swap_megs)
    return memdict

def findHostByRoute():
    """ Return [hostname, intf, intf6].

    Where hostname is the FQDN of this machine, intf is the numeric
    IPv4 address, and intf6 is the IPv6 address.
    """
    cfg = config.initUp2dateConfig()
    sl = config.getServerlURL()
    st = {'https':443, 'http':80}
    hostname = None
    intf = None
    intf6 = None
    for serverUrl in sl:
        server = serverUrl.split('/')[2]
        servertype = serverUrl.split(':')[0]
        port = st[servertype]

        for family in (AF_INET6, AF_INET):
            try:
                s = socket.socket(family)
            except socket.error:
                continue

            if cfg['enableProxy']:
                server_port = config.getProxySetting()
                (server, port) = server_port.split(':')
                port = int(port)

            try:
                s.settimeout(5)
                s.connect((server, port))
                intf_tmp = s.getsockname()[0]
                if family == AF_INET:
                    intf = intf_tmp
                else:
                    intf6 = intf_tmp
                hostname_tmp = socket.getfqdn(intf_tmp)
                if hostname_tmp != intf_tmp:
                    hostname = hostname_tmp
            except socket.error:
                s.close()
                continue
            s.close()

    # Override hostname with the value from /etc/hostname
    if os.path.isfile("/etc/hostname") and os.access("/etc/hostname", os.R_OK):
        hostnameinfo = open("/etc/hostname", "r").readlines()

        for info in hostnameinfo:
            if not len(info):
                continue
            hostname = info.strip()

    # Override hostname with the one in /etc/sysconfig/network
    # for bz# 457953
    elif os.path.isfile("/etc/sysconfig/network") and os.access("/etc/sysconfig/network", os.R_OK):
        networkinfo = open("/etc/sysconfig/network", "r").readlines()

        for info in networkinfo:
            if not len(info):
                continue
            vals = info.split('=')
            if len(vals) <= 1:
                continue
            strippedstring = vals[0].strip()
            vals[0] = strippedstring
            if vals[0] == "HOSTNAME":
                hostname = ''.join(vals[1:]).strip()
                break

    if hostname == None or hostname == 'localhost.localdomain':
        hostname = "unknown"
    return hostname, intf, intf6

def get_slave_hwaddr(master, slave):
    hwaddr = ""
    try:
        bonding = open('/proc/net/bonding/%s' % master, "r")
    except:
        return hwaddr

    slave_found = False
    for line in bonding.readlines():
        if slave_found and line.find("Permanent HW addr: ") != -1:
            hwaddr = line.split()[3]
            break

        if line.find("Slave Interface: ") != -1:
            ifname = line.split()[2]
            if ifname == slave:
                slave_found = True

    bonding.close()
    return hwaddr

def read_network():
    netdict = {}
    netdict['class'] = "NETINFO"

    netdict['hostname'], netdict['ipaddr'], netdict['ip6addr'] = findHostByRoute()

    if netdict['hostname'] == "unknown":
        netdict['hostname'] = gethostname()
        if "." not in netdict['hostname']:
            netdict['hostname'] = socket.getfqdn()

    if netdict['ipaddr'] is None:
        try:
            list_of_addrs = getaddrinfo(netdict['hostname'], None)
            # list() so indexing works under Python 3 as well
            ipv4_addrs = list(filter(lambda x: x[0] == socket.AF_INET, list_of_addrs))
            # take first ipv4 addr
            netdict['ipaddr'] = ipv4_addrs[0][4][0]
        except:
            netdict['ipaddr'] = "127.0.0.1"

    if netdict['ip6addr'] is None:
        try:
            list_of_addrs = getaddrinfo(netdict['hostname'], None)
            ipv6_addrs = list(filter(lambda x: x[0] == socket.AF_INET6, list_of_addrs))
            # take first ipv6 addr
            netdict['ip6addr'] = ipv6_addrs[0][4][0]
        except:
            netdict['ip6addr'] = "::1"

    if netdict['ipaddr'] is None:
        netdict['ipaddr'] = ''
    if netdict['ip6addr'] is None:
        netdict['ip6addr'] = ''
    return netdict

def read_network_interfaces():
    intDict = {}
    intDict['class'] = "NETINTERFACES"

    if not ethtool_present:
        # ethtool is not available on non-linux platforms (as kfreebsd), skip it
        return intDict

    interfaces = list(set(ethtool.get_devices() + ethtool.get_active_devices()))
    for interface in interfaces:
        try:
            hwaddr = ethtool.get_hwaddr(interface)
        except:
            hwaddr = ""

        # slave devices can have their hwaddr changed
        try:
            master = os.readlink('/sys/class/net/%s/master' % interface)
        except:
            master = None

        if master:
            master_interface = os.path.basename(master)
            hwaddr = get_slave_hwaddr(master_interface, interface)

        try:
            module = ethtool.get_module(interface)
        except:
            if interface == 'lo':
                module = "loopback"
            else:
                module = "Unknown"

        try:
            ipaddr = ethtool.get_ipaddr(interface)
        except:
            ipaddr = ""

        try:
            netmask = ethtool.get_netmask(interface)
        except:
            netmask = ""

        try:
            broadcast = ethtool.get_broadcast(interface)
        except:
            broadcast = ""

        ip6_list = []
        dev_info = ethtool.get_interfaces_info(interface)
        for info in dev_info:
            # one interface may have more than one IPv6 address
            for ip6 in info.get_ipv6_addresses():
                scope = ip6.scope
                if scope == 'global':
                    scope = 'universe'
                ip6_list.append({
                    'scope':   scope,
                    'addr':    ip6.address,
                    'netmask': ip6.netmask
                })

        intDict[interface] = {'hwaddr': hwaddr,
                              'ipaddr': ipaddr,
                              'netmask': netmask,
                              'broadcast': broadcast,
                              'module': module,
                              'ipv6': ip6_list}

    return intDict

# Read DMI information via dmidecode.
def read_dmi():
    dmidict = {}
    dmidict["class"] = "DMI"

    # Try to obtain DMI info if architecture is i386, x86_64 or ia64
    uname = os.uname()[4].lower()
    if not (uname[0] == "i" and uname[-2:] == "86") and not (uname == "x86_64"):
        return dmidict

    # System Information
    vendor = dmi_vendor()
    if vendor:
        dmidict["vendor"] = vendor

    product = get_dmi_data('/dmidecode/SystemInfo/ProductName')
    if product:
        dmidict["product"] = product

    version = get_dmi_data('/dmidecode/SystemInfo/Version')
    if version:
        system = product + " " + version
        dmidict["system"] = system

    # BaseBoard Information
    dmidict["board"] = get_dmi_data('/dmidecode/BaseBoardInfo/Manufacturer')

    # Bios Information
    vendor = get_dmi_data('/dmidecode/BIOSinfo/Vendor')
    if vendor:
        dmidict["bios_vendor"] = vendor

    version = get_dmi_data('/dmidecode/BIOSinfo/Version')
    if version:
        dmidict["bios_version"] = version

    release = get_dmi_data('/dmidecode/BIOSinfo/ReleaseDate')
    if release:
        dmidict["bios_release"] = release

    # Chassis Information
    # The hairy part is figuring out if there is an asset tag/serial number of importance
    chassis_serial = get_dmi_data('/dmidecode/ChassisInfo/SerialNumber')
    chassis_tag = get_dmi_data('/dmidecode/ChassisInfo/AssetTag')
    board_serial = get_dmi_data('/dmidecode/BaseBoardInfo/SerialNumber')
    system_serial = get_dmi_data('/dmidecode/SystemInfo/SerialNumber')

    dmidict["asset"] = "(%s: %s) (%s: %s) (%s: %s) (%s: %s)" % ("chassis", chassis_serial,
                                                                "chassis", chassis_tag,
                                                                "board", board_serial,
                                                                "system", system_serial)

    # Clean up empty entries
    for k in list(dmidict.keys()):
        if dmidict[k] is None:
            del dmidict[k]
    # Finished
    return dmidict

def get_hal_system_and_smbios():
    try:
        if using_gudev:
            props = get_computer_info()
        else:
            computer = get_hal_computer()
            props = computer.GetAllProperties()
    except Exception:
        log = up2dateLog.initLog()
        msg = "Error reading system and smbios information: %s\n" % (sys.exc_info()[1])
        log.log_debug(msg)
        return {}
    system_and_smbios = {}

    for key in props:
        if key.startswith('system'):
            system_and_smbios[ustr(key)] = ustr(props[key])

    system_and_smbios.update(get_smbios())
    return system_and_smbios

def get_smbios():
    """ Return a dictionary with the values we are interested in.

    For historical reasons it is in the format which HAL used.
    Currently the dictionary contains the keys:
    smbios.system.uuid, smbios.bios.vendor, smbios.system.serial,
    smbios.system.manufacturer.
    """
    _initialize_dmi_data()
    if _dmi_not_available:
        return {}
    else:
        return {
            'smbios.system.uuid': dmi_system_uuid(),
            'smbios.bios.vendor': dmi_vendor(),
            'smbios.system.serial': get_dmi_data('/dmidecode/SystemInfo/SerialNumber'),
            'smbios.system.manufacturer': get_dmi_data('/dmidecode/SystemInfo/Manufacturer'),
            'smbios.system.product': get_dmi_data('/dmidecode/SystemInfo/ProductName'),
            'smbios.system.skunumber': get_dmi_data('/dmidecode/SystemInfo/SKUnumber'),
            'smbios.system.family': get_dmi_data('/dmidecode/SystemInfo/Family'),
            'smbios.system.version': get_dmi_data('/dmidecode/SystemInfo/Version'),
        }

# this one reads it all
def Hardware():
    if using_gudev:
        allhw = get_devices()
    else:
        hal_status, dbus_status = check_hal_dbus_status()
        hwdaemon = 1
        if hal_status or dbus_status:
            # if status != 0, haldaemon or messagebus service is not running.
            # set the flag, don't try probing hardware and DMI info,
            # and warn the user.
            log = up2dateLog.initLog()
            msg = "Warning: haldaemon or messagebus service not running. Cannot probe hardware and DMI information.\n"
            log.log_me(msg)
            hwdaemon = 0
        allhw = []

        if hwdaemon:
            try:
                ret = read_hal()
                if ret:
                    allhw = ret
            except:
                # bz253596 : Logging Dbus Error messages instead of printing on stdout
                log = up2dateLog.initLog()
                msg = "Error reading hardware information: %s\n" % (sys.exc_info()[0])
                log.log_me(msg)

    # all others return individual arrays

    # cpu info
    try:
        ret = read_cpuinfo()
        if ret:
            allhw.append(ret)
    except:
        print(_("Error reading cpu information:"), sys.exc_info()[0])

    # memory size info
    try:
        ret = read_memory()
        if ret:
            allhw.append(ret)
    except:
        print(_("Error reading system memory information:"), sys.exc_info()[0])

    cfg = config.initUp2dateConfig()
    if not cfg["skipNetwork"]:
        # minimal networking info
        try:
            ret = read_network()
            if ret:
                allhw.append(ret)
        except:
            print(_("Error reading networking information:"), sys.exc_info()[0])
        # don't like catchall exceptions but there's not
        # really anything useful we could do at this point
        # and it's been trouble prone enough

    # minimal DMI info
    try:
        ret = read_dmi()
        if ret:
            allhw.append(ret)
    except:
        # bz253596 : Logging Dbus Error messages instead of printing on stdout
        log = up2dateLog.initLog()
        msg = "Error reading DMI information: %s\n" % (sys.exc_info()[0])
        log.log_me(msg)

    try:
        ret = read_installinfo()
        if ret:
            allhw.append(ret)
    except:
        print(_("Error reading install method information:"), sys.exc_info()[0])

    if not cfg["skipNetwork"]:
        try:
            ret = read_network_interfaces()
            if ret:
                allhw.append(ret)
        except:
            print(_("Error reading network interface information:"), sys.exc_info()[0])

    # all Done.
    return allhw

# XXX: Need more functions here:
#  - filesystems layout (/proc/mounts and /proc/mdstat)
#  - is the kudzu config enough or should we start chasing lspci and try to parse that
#    piece of crap output?

#
# Main program
#
if __name__ == '__main__':
    for hw in Hardware():
        for k in hw.keys():
            print("'%s' : '%s'" % (k, hw[k]))
        print<|fim▁end|>
hwdict['platform'] = uname
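The completion closes out the s390 branch of read_cpuinfo(), whose other branches all lean on the nested get_cpulist_as_dict() helper: /proc/cpuinfo stanzas are separated by blank lines, each attribute is a colon-separated "name : value" pair keyed lowercase, and only the first stanza is parsed. A condensed sketch of that parsing strategy (hypothetical helper name; str.partition stands in for the original split-and-length-check):

def first_cpu_block(cpulist):
    # Stanzas are separated by blank lines; attributes are "name : value" pairs.
    tmpdict = {}
    for block in cpulist.split("\n\n"):
        if not block:
            continue
        for attr in block.split("\n"):
            name, sep, value = attr.partition(":")
            if not sep:
                continue  # skip lines that are not attribute pairs
            tmpdict[name.strip().lower()] = value.strip()
        break  # only the first processor block is needed
    return tmpdict

sample = "processor\t: 0\nmodel name\t: Example CPU\n\nprocessor\t: 1\n"
assert first_cpu_block(sample)["model name"] == "Example CPU"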
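Further down the same row, read_memory_2_6() derives RAM and swap sizes by splitting each /proc/meminfo line on the first colon and dividing the kB figure by 1024. The same arithmetic as a standalone sketch (Linux-only, hypothetical function name):

def mem_and_swap_megs(path="/proc/meminfo"):
    info = {}
    with open(path) as f:
        for line in f:
            key, sep, value = line.partition(":")
            if sep:
                info[key] = value.strip()
    # Values look like "16384256 kB"; keep the integer and convert kB to MB.
    ram_megs = int(info["MemTotal"].split()[0]) // 1024
    swap_megs = int(info["SwapTotal"].split()[0]) // 1024
    return str(ram_megs), str(swap_megs)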
<|file_name|>GetAllJobsResponse.java<|end_file_name|><|fim▁begin|>/**
 * GetAllJobsResponse.java
 * Created by pgirard at 2:07:29 PM on Aug 19, 2010
 * in the com.qagwaai.starmalaccamax.shared.services.action package
 * for the JobMalaccamax project
 */
package com.qagwaai.starmalaccamax.client.service.action;

import java.util.ArrayList;

<|fim▁hole|>

/**
 * @author pgirard
 */
public final class GetAllJobsResponse extends AbstractResponse implements IsSerializable {

    /**
     *
     */
    private ArrayList<JobDTO> jobs;

    /**
     *
     */
    private int totalJobs;

    /**
     * @return the jobs
     */
    public ArrayList<JobDTO> getJobs() {
        return jobs;
    }

    /**
     * @return the totalJobs
     */
    public int getTotalJobs() {
        return totalJobs;
    }

    /**
     * @param jobs
     *            the jobs to set
     */
    public void setJobs(final ArrayList<JobDTO> jobs) {
        this.jobs = jobs;
    }

    /**
     * @param totalJobs
     *            the totalJobs to set
     */
    public void setTotalJobs(final int totalJobs) {
        this.totalJobs = totalJobs;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        return "GetAllJobsResponse [jobs=" + jobs + ", totalJobs=" + totalJobs + "]";
    }
}<|fim▁end|>
import com.google.gwt.user.client.rpc.IsSerializable;
import com.qagwaai.starmalaccamax.shared.model.JobDTO;
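The completed imports round out a plain response DTO: a page of JobDTO records plus a total count so the client can paginate. The same shape in Python, as an illustrative analog only (the class name marks it as a sketch):

from dataclasses import dataclass, field
from typing import List

@dataclass
class GetAllJobsResponseSketch:
    # jobs carries one page of results; total_jobs the overall row count.
    jobs: List[dict] = field(default_factory=list)
    total_jobs: int = 0

resp = GetAllJobsResponseSketch(jobs=[{"id": 1}], total_jobs=42)
assert resp.total_jobs == 42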
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""  # lint-amnesty, pylint: disable=django-not-configured
Bookmarks module.
"""

<|fim▁hole|>

DEFAULT_FIELDS = [
    'id',
    'course_id',
    'usage_id',
    'block_type',
    'created',
]

OPTIONAL_FIELDS = [
    'display_name',
    'path',
]

PathItem = namedtuple('PathItem', ['usage_key', 'display_name'])<|fim▁end|>
from collections import namedtuple
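The final completion restores the namedtuple import that PathItem depends on. A small usage sketch; the usage key string below is invented for illustration:

from collections import namedtuple

PathItem = namedtuple('PathItem', ['usage_key', 'display_name'])

item = PathItem(usage_key='block-v1:edX+Demo+2024+type@chapter+block@intro',
                display_name='Introduction')
assert item.display_name == 'Introduction'        # fields are accessible by name
assert item == (item.usage_key, 'Introduction')   # and still behave like a tuple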