prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>CredentialsCode.java<|end_file_name|><|fim▁begin|>package com.thilko.springdoc; @SuppressWarnings("all") public class CredentialsCode { Integer age; double anotherValue; public Integer getAge() { return age; } public void setAge(Integer age) {<|fim▁hole|> public double getAnotherValue() { return anotherValue; } public void setAnotherValue(double anotherValue) { this.anotherValue = anotherValue; } }<|fim▁end|>
this.age = age; }
<|file_name|>coderunner.py<|end_file_name|><|fim▁begin|>import code import signal import sys import greenlet import logging class SigintHappened(object): pass class SystemExitFromCodeThread(SystemExit): pass class CodeRunner(object): """Runs user code in an interpreter, taking care of stdout/in/err""" def __init__(self, interp=None, stuff_a_refresh_request=lambda:None):<|fim▁hole|> self.main_greenlet = greenlet.getcurrent() self.code_greenlet = None self.stuff_a_refresh_request = stuff_a_refresh_request self.code_is_waiting = False self.sigint_happened = False self.orig_sigint_handler = None @property def running(self): return self.source and self.code_greenlet def load_code(self, source): """Prep code to be run""" self.source = source self.code_greenlet = None def _unload_code(self): """Called when done running code""" self.source = None self.code_greenlet = None self.code_is_waiting = False def run_code(self, for_code=None): """Returns Truthy values if code finishes, False otherwise if for_code is provided, send that value to the code greenlet if source code is complete, returns "done" if source code is incomplete, returns "unfinished" """ if self.code_greenlet is None: assert self.source is not None self.code_greenlet = greenlet.greenlet(self._blocking_run_code) self.orig_sigint_handler = signal.getsignal(signal.SIGINT) signal.signal(signal.SIGINT, self.sigint_handler) request = self.code_greenlet.switch() else: assert self.code_is_waiting self.code_is_waiting = False signal.signal(signal.SIGINT, self.sigint_handler) if self.sigint_happened: self.sigint_happened = False request = self.code_greenlet.switch(SigintHappened) else: request = self.code_greenlet.switch(for_code) if request in ['wait', 'refresh']: self.code_is_waiting = True if request == 'refresh': self.stuff_a_refresh_request() return False elif request in ['done', 'unfinished']: self._unload_code() signal.signal(signal.SIGINT, self.orig_sigint_handler) self.orig_sigint_handler = None return request elif request in ['SystemExit']: #use the object? self._unload_code() raise SystemExitFromCodeThread() else: raise ValueError("Not a valid value from code greenlet: %r" % request) def sigint_handler(self, *args): if greenlet.getcurrent() is self.code_greenlet: logging.debug('sigint while running user code!') raise KeyboardInterrupt() else: logging.debug('sigint while fufilling code request sigint handler running!') self.sigint_happened = True def _blocking_run_code(self): try: unfinished = self.interp.runsource(self.source) except SystemExit: return 'SystemExit' return 'unfinished' if unfinished else 'done' def wait_and_get_value(self): """Return the argument passed in to .run_code(for_code) Nothing means calls to run_code must be... """ value = self.main_greenlet.switch('wait') if value is SigintHappened: raise KeyboardInterrupt() return value def refresh_and_get_value(self): """Returns the argument passed in to .run_code(for_code) """ value = self.main_greenlet.switch('refresh') if value is SigintHappened: raise KeyboardInterrupt() return value class FakeOutput(object): def __init__(self, coderunner, please): self.coderunner = coderunner self.please = please def write(self, *args, **kwargs): self.please(*args, **kwargs) return self.coderunner.refresh_and_get_value() def test_simple(): orig_stdout = sys.stdout orig_stderr = sys.stderr c = CodeRunner(stuff_a_refresh_request=lambda: orig_stdout.flush() or orig_stderr.flush()) stdout = FakeOutput(c, orig_stdout.write) sys.stdout = stdout c.load_code('1 + 1') c.run_code() c.run_code() c.run_code() def test_exception(): orig_stdout = sys.stdout orig_stderr = sys.stderr c = CodeRunner(stuff_a_refresh_request=lambda: orig_stdout.flush() or orig_stderr.flush()) def ctrlc(): raise KeyboardInterrupt() stdout = FakeOutput(c, lambda x: ctrlc()) sys.stdout = stdout c.load_code('1 + 1') c.run_code() if __name__ == '__main__': test_simple()<|fim▁end|>
self.interp = interp or code.InteractiveInterpreter() self.source = None
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/env/bin/ python3 from setuptools import setup, Extension # #CXX_FLAGS = "-O3 -std=gnu++11 -Wall -Wno-comment" # ## List of C/C++ sources that will conform the library #sources = [ # # "andrnx/clib/android.c", # #]<|fim▁hole|>setup(name="andrnx", version="0.1", description="Package to convert from GNSS logger to Rinex files", author='Miquel Garcia', author_email='[email protected]', url='https://www.rokubun.cat', packages=['andrnx'], test_suite="andrnx.test", scripts=['bin/gnsslogger_to_rnx'])<|fim▁end|>
<|file_name|>synth.rs<|end_file_name|><|fim▁begin|>use audiobuffer::*; use processblock::ProcessBlock; use port::Port; use jack::prelude::RawMidi; use synthconfig::SynthConfig; #[derive(Debug, Clone, Copy)] pub struct BlockId(usize); #[derive(Debug, Clone)] pub struct Connection{ buffer_id: usize, block: BlockId, port: Port, } impl Connection{ pub fn new() -> Connection { Connection{ buffer_id: 0, block: BlockId(0), port: Port{nr: 0}} }<|fim▁hole|> #[derive(Debug)] struct ProcessBlockAtSynth{ block: Box<ProcessBlock>, inputs: Vec<Connection>, // connection to an output outputs: Vec<usize>, // id of this output } #[derive(Debug)] pub struct Synth{ blocks: Vec<ProcessBlockAtSynth>, output: Connection, buffer_size: usize, output_port_count: usize, // only between prework and postwork workdata: Option<WorkData> } #[derive(Debug)] struct WorkData{ nframes: usize, audiobuffers: AudioBufferVector, inputs: AudioBufferVector, outputs: AudioBufferVector, workorder: Vec<usize>, output: AudioBuffer, } /* Try to change how it works internally, to create all audiobuffers for output and input (input to 0), and at connect, change the input buffers. They are Rc so should work. If works, then the work order does not need synthwork, but just the list of blocks in the proper order. */ impl Synth{ pub fn new() -> Synth{ let mut sn = Synth{ blocks: Vec::new(), output: Connection{ buffer_id: 0, block: BlockId(127), port: Port{nr:0} }, buffer_size: 128, output_port_count: 0, workdata: None }; sn.add(::blocks::midi::MIDI::new()); sn } pub fn connect(&mut self, block_out: BlockId, port_out: Port, block_in: BlockId, port_in: Port) -> &mut Self { let buffer_id = self.get_output_port_number(block_out, port_out); { let conn = &mut self.blocks[block_in.0].inputs[port_in.nr]; conn.buffer_id = buffer_id; conn.block = block_out; conn.port = port_out; } self } pub fn add(&mut self, block: Box<ProcessBlock>) -> BlockId{ let n = self.blocks.len(); let inputs=vec![Connection::new(); block.input_count()]; let mut outputs=Vec::new(); for _ in 0..block.output_count(){ outputs.push(self.output_port_count); self.output_port_count+=1; } self.blocks.push(ProcessBlockAtSynth{ block: block, inputs: inputs, outputs: outputs}); BlockId(n) } pub fn output(&mut self, output: BlockId, port: Port){ self.output=Connection::new(); self.output.block=output; self.output.port=port; } fn get_output_port_count(&self) -> usize{ let mut count = 0; for b in &self.blocks{ count += b.block.output_count(); } count } fn get_max_input_ports(&self) -> usize{ let mut max = 0; for b in &self.blocks{ max = ::std::cmp::max( max, b.block.input_count()) } max } fn get_max_output_ports(&self) -> usize{ let mut max = 0; for b in &self.blocks{ max = ::std::cmp::max( max, b.block.output_count() ) } max } pub fn get_midi(&self) -> BlockId{ BlockId(0) // always the first block is the midi connector } pub fn pre_work(&mut self, config: &SynthConfig){ let workdata = WorkData{ workorder: self.calculate_work_order(), //println!("Workorder is {:?}", workorder); audiobuffers: AudioBufferVector::new(self.get_output_port_count(), self.buffer_size), inputs: AudioBufferVector::new_empty(self.get_max_input_ports()), outputs: AudioBufferVector::new_empty(self.get_max_output_ports()), nframes: 0, output: AudioBuffer::new(self.buffer_size) }; self.workdata = Some(workdata); for bl in &mut self.blocks{ bl.block.setup(config); } } pub fn work(&mut self) -> &AudioBuffer{ let mut workdata_option = self.workdata.take(); { let mut workdata = &mut workdata_option.as_mut().unwrap(); let mut audiobuffers = &mut workdata.audiobuffers; let mut inputs = &mut workdata.inputs; let mut outputs = &mut workdata.outputs; for (block_id, rpb) in (&workdata.workorder).into_iter().enumerate(){ let pb = *rpb; // loan audio buffers from the main list of audiobuffers { let cblock = &self.blocks[pb]; let block = &cblock.block; //println!("{}", Colour::Green.paint(format!("## {:?} ({:?}) <({:?}) <({:?})", block, block_id, cblock.inputs, cblock.outputs))); for port_in in 0..block.input_count() { let ipn = self.get_input_port_number(BlockId(block_id), Port::new(port_in)); let ab = audiobuffers.get( ipn ) .expect(format!("ERROR module {:?}: Input port {} at {:?}/{:?} already in use.", block.typename(), ipn, block_id, port_in).as_str()); inputs.put(port_in, ab); } // println!("Got i"); for port_out in 0..block.output_count() { let opn = self.get_output_port_number(BlockId(block_id), Port::new(port_out)); let ab = audiobuffers.get( opn ) .expect(format!("ERROR module {:?}: Input port {} at {:?}/{:?} already in use.", block.typename(), opn, block_id, port_out).as_str()); outputs.put(port_out, ab); } // println!("Got io"); } // process self.blocks[pb].block.process(&mut inputs, &mut outputs); // return the buffers { let block = &self.blocks[pb].block; // println!("Put io"); for port_in in 0..block.input_count() { audiobuffers.put( self.get_input_port_number(BlockId(block_id), Port::new(port_in)), inputs.get(port_in) .expect(format!("Invalid input port in at {}/{}", block_id, port_in).as_str()) ); } for port_out in 0..block.output_count() { let op = outputs.get(port_out).expect(format!("ERROR {} forgot to return buffer {}", block.typename(), port_out).as_str()); audiobuffers.put(self.get_output_port_number(BlockId(block_id), Port::new(port_out)), op ); } audiobuffers.check_all_some(); // println!("Done all ok"); } } let out_block = (self.output.block).0; let out_port = self.output.port; let outputp = self.blocks[out_block].outputs[out_port.nr]; let output = audiobuffers.get(outputp).unwrap(); //println!("{}: {}", workdata.nframes, Colour::Blue.paint(format!("{}", output))); for (o, i) in ::itertools::zip(&mut workdata.output, &output){ *o=*i } audiobuffers.put(outputp, output); workdata.nframes+=1; } self.workdata=workdata_option; &self.workdata.as_ref().unwrap().output } // pub fn post_work(&mut self){ // self.workdata=None; // } pub fn send_midi(&mut self, event: RawMidi){ let genblock = &mut self.blocks[0].block; let midi = &mut genblock.into_midi().unwrap(); midi.event(event) } fn calculate_work_order(&mut self) -> Vec<usize>{ let mut wo = Vec::new(); for i in 0..self.blocks.len() { wo.push(i); } wo } fn get_input_port_number(&self, block_id: BlockId, port_id: Port) -> usize{ self.blocks[block_id.0].inputs[port_id.nr].buffer_id } fn get_output_port_number(&self, block_id: BlockId, port_id: Port) -> usize{ // println!("Get {:?}/{:?}", &block_id, &port_id); match self.blocks.get(block_id.0) { None =>{ println!("Invalid block id {:?}", block_id); 0 }, Some(block) => match block.outputs.get(port_id.nr) { None => { println!("Invalid output id {:?} at {:?} ({:?})", port_id, block_id, block.block); 0 }, Some(x) => *x } } } pub fn block(&self, bl: &BlockId) -> &Box<ProcessBlock>{ &self.blocks[bl.0].block } pub fn set_cc_value(&mut self, cc_name: &str, value: f32){ self.blocks[0].block.into_midi().unwrap().set_cc_value(cc_name, value); } } #[cfg(test)] mod tests{ #[test] fn synth_to_stdout(){ } }<|fim▁end|>
}
<|file_name|>macroJs.test.ts<|end_file_name|><|fim▁begin|>import { parseExpression } from "@babel/parser" import * as types from "@babel/types" import MacroJs from "./macroJs" function createMacro() { return new MacroJs({ types }, { i18nImportName: "i18n" }) } describe("js macro", () => { describe("tokenizeTemplateLiteral", () => { it("simple message without arguments", () => { const macro = createMacro() const exp = parseExpression("t`Message`") const tokens = macro.tokenizeTemplateLiteral(exp) expect(tokens).toEqual([ { type: "text", value: "Message", }, ]) }) it("with custom lingui instance", () => { const macro = createMacro() const exp = parseExpression("t(i18n)`Message`") const tokens = macro.tokenizeTemplateLiteral(exp) expect(tokens).toEqual([ { type: "text", value: "Message", }, ]) }) it("message with named argument", () => { const macro = createMacro() const exp = parseExpression("t`Message ${name}`") const tokens = macro.tokenizeTemplateLiteral(exp) expect(tokens).toEqual([ { type: "text", value: "Message ", }, { type: "arg", name: "name", value: expect.objectContaining({ name: "name", type: "Identifier", }), }, ]) }) it("message with positional argument", () => { const macro = createMacro() const exp = parseExpression("t`Message ${obj.name}`") const tokens = macro.tokenizeTemplateLiteral(exp) expect(tokens).toEqual([ { type: "text", value: "Message ", }, { type: "arg", name: 0, value: expect.objectContaining({ type: "MemberExpression", }), }, ]) }) it("message with plural", () => { const macro = createMacro() const exp = parseExpression("t`Message ${plural(count, {})}`") const tokens = macro.tokenizeTemplateLiteral(exp) expect(tokens).toEqual([ {<|fim▁hole|> { type: "arg", name: "count", value: expect.objectContaining({ type: "Identifier", }), format: "plural", options: {}, }, ]) }) it("message with unicode \\u chars is interpreted by babel", () => { const macro = createMacro() const exp = parseExpression("t`Message \\u0020`") const tokens = macro.tokenizeTemplateLiteral(exp) expect(tokens).toEqual([ { type: "text", value: "Message ", }, ]) }) it("message with unicode \\x chars is interpreted by babel", () => { const macro = createMacro() const exp = parseExpression("t`Bienvenue\\xA0!`") const tokens = macro.tokenizeTemplateLiteral(exp) expect(tokens).toEqual([ { type: "text", // Looks like an empty space, but it isn't value: "Bienvenue !", }, ]) }) it("message with double scaped literals it's stripped", () => { const macro = createMacro() const exp = parseExpression( "t`Passing \\`${argSet}\\` is not supported.`" ) const tokens = macro.tokenizeTemplateLiteral(exp) expect(tokens).toEqual([ { type: "text", value: "Passing `", }, { name: "argSet", type: "arg", value: { end: 20, loc: { end: { column: 20, line: 1, }, identifierName: "argSet", start: { column: 14, line: 1, }, }, name: "argSet", start: 14, type: "Identifier", }, }, { type: "text", value: "` is not supported.", }, ]) }) }) describe("tokenizeChoiceMethod", () => { it("plural", () => { const macro = createMacro() const exp = parseExpression( "plural(count, { one: '# book', other: '# books'})" ) const tokens = macro.tokenizeChoiceComponent(exp) expect(tokens).toEqual({ type: "arg", name: "count", value: expect.objectContaining({ name: "count", type: "Identifier", }), format: "plural", options: { one: "# book", other: "# books", }, }) }) it("plural with offset", () => { const macro = createMacro() const exp = parseExpression( `plural(count, { offset: 1, 0: 'No books', one: '# book', other: '# books' })` ) const tokens = macro.tokenizeChoiceComponent(exp) expect(tokens).toEqual({ type: "arg", name: "count", value: expect.objectContaining({ name: "count", type: "Identifier", }), format: "plural", options: { offset: 1, "=0": "No books", one: "# book", other: "# books", }, }) }) it("plural with template literal", () => { const macro = createMacro() const exp = parseExpression( "plural(count, { one: `# glass of ${drink}`, other: `# glasses of ${drink}`})" ) const tokens = macro.tokenizeChoiceComponent(exp) expect(tokens).toEqual({ type: "arg", name: "count", value: expect.objectContaining({ name: "count", type: "Identifier", }), format: "plural", options: { one: [ { type: "text", value: "# glass of ", }, { type: "arg", name: "drink", value: expect.objectContaining({ name: "drink", type: "Identifier", }), }, ], other: [ { type: "text", value: "# glasses of ", }, { type: "arg", name: "drink", value: expect.objectContaining({ name: "drink", type: "Identifier", }), }, ], }, }) }) it("plural with select", () => { const macro = createMacro() const exp = parseExpression( `plural(count, { one: select(gender, { male: "he", female: "she", other: "they" }) })` ) const tokens = macro.tokenizeChoiceComponent(exp) expect(tokens).toEqual({ type: "arg", name: "count", value: expect.objectContaining({ name: "count", type: "Identifier", }), format: "plural", options: { one: [ { type: "arg", name: "gender", value: expect.objectContaining({ name: "gender", type: "Identifier", }), format: "select", options: { male: "he", female: "she", other: "they", }, }, ], }, }) }) it("select", () => { const macro = createMacro() const exp = parseExpression( `select(gender, { male: "he", female: "she", other: "they" })` ) const tokens = macro.tokenizeChoiceComponent(exp) expect(tokens).toEqual({ format: "select", name: "gender", options: expect.objectContaining({ female: "she", male: "he", offset: undefined, other: "they", }), type: "arg", value: { end: 13, loc: { end: expect.objectContaining({ column: 13, line: 1, }), identifierName: "gender", start: expect.objectContaining({ column: 7, line: 1, }), }, name: "gender", start: 7, type: "Identifier", }, }) }) }) })<|fim▁end|>
type: "text", value: "Message ", },
<|file_name|>DomElementAnnotationHolder.java<|end_file_name|><|fim▁begin|>/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.xml.highlighting; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.ProblemHighlightType; import com.intellij.lang.annotation.Annotation; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiReference; import com.intellij.util.xml.DomElement; import com.intellij.util.xml.DomFileElement; import com.intellij.util.xml.GenericDomValue; import com.intellij.util.xml.reflect.DomCollectionChildDescription; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public interface DomElementAnnotationHolder extends Iterable<DomElementProblemDescriptor>{ boolean isOnTheFly(); @NotNull DomFileElement<?> getFileElement(); @NotNull DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, @Nullable String message, LocalQuickFix... fixes); @NotNull DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, DomCollectionChildDescription childDescription, @Nullable String message); @NotNull DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, HighlightSeverity highlightType, String message); DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, HighlightSeverity highlightType, String message, LocalQuickFix... fixes); DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, HighlightSeverity highlightType, String message, TextRange textRange, LocalQuickFix... fixes); DomElementProblemDescriptor createProblem(@NotNull DomElement domElement, ProblemHighlightType highlightType, String message, @Nullable TextRange textRange, LocalQuickFix... fixes); @NotNull DomElementResolveProblemDescriptor createResolveProblem(@NotNull GenericDomValue element, @NotNull PsiReference reference);<|fim▁hole|> /** * Is useful only if called from {@link com.intellij.util.xml.highlighting.DomElementsAnnotator} instance * @param element element * @param severity highlight severity * @param message description * @return annotation */ @NotNull Annotation createAnnotation(@NotNull DomElement element, HighlightSeverity severity, @Nullable String message); int getSize(); }<|fim▁end|>
<|file_name|>SampleScript.py<|end_file_name|><|fim▁begin|># oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text. # Copyright (c) 2016, Gluu # # Author: Yuriy Movchan # from org.gluu.model.custom.script.type.user import CacheRefreshType from org.gluu.util import StringHelper, ArrayHelper from java.util import Arrays, ArrayList from org.gluu.oxtrust.model import GluuCustomAttribute from org.gluu.model.custom.script.model.bind import BindCredentials import java class CacheRefresh(CacheRefreshType): def __init__(self, currentTimeMillis): self.currentTimeMillis = currentTimeMillis def init(self, configurationAttributes): print "Cache refresh. Initialization" print "Cache refresh. Initialized successfully" return True def destroy(self, configurationAttributes): print "Cache refresh. Destroy" print "Cache refresh. Destroyed successfully" return True # Get bind credentials required to access source server # configId is the source server # configurationAttributes is java.util.Map<String, SimpleCustomProperty> # return None (use password from configuration) or org.gluu.model.custom.script.model.bind.BindCredentials def getBindCredentials(self, configId, configurationAttributes): print "Cache refresh. GetBindCredentials method" return None # Update user entry before persist it<|fim▁hole|> # user is org.gluu.oxtrust.model.GluuCustomPerson # configurationAttributes is java.util.Map<String, SimpleCustomProperty> def updateUser(self, user, configurationAttributes): print "Cache refresh. UpdateUser method" attributes = user.getCustomAttributes() # Add new attribute preferredLanguage attrPrefferedLanguage = GluuCustomAttribute("preferredLanguage", "en-us") attributes.add(attrPrefferedLanguage) # Add new attribute userPassword attrUserPassword = GluuCustomAttribute("userPassword", "test") attributes.add(attrUserPassword) # Update givenName attribute for attribute in attributes: attrName = attribute.getName() if (("givenname" == StringHelper.toLowerCase(attrName)) and StringHelper.isNotEmpty(attribute.getValue())): attribute.setValue(StringHelper.removeMultipleSpaces(attribute.getValue()) + " (updated)") return True def getApiVersion(self): return 2<|fim▁end|>
<|file_name|>randomplayer.cpp<|end_file_name|><|fim▁begin|>#include "randomplayer.h" #include <QDirIterator> void RandomPlayer::start() { this->setMedia(QUrl::fromLocalFile(fileList.takeFirst())); this->play(); this->_readyToPlay = true; } void RandomPlayer::quitPlayMode() { this->_readyToPlay = false; this->stop(); } bool RandomPlayer::isPlayMode(){ return this->_readyToPlay; } void RandomPlayer::initList(bool includePiano, bool includeChants, bool includeMelodies) { QString basedir = iPlayer::getMusicRoot(); QStringList listFilter; listFilter << "*.mp3"; if(!includePiano && !includeChants && !includeMelodies) { includePiano = true; } if (includePiano) { QDirIterator dirIterator(basedir+"/cantiques/", listFilter ,QDir::Files | QDir::NoSymLinks, QDirIterator::Subdirectories); while(dirIterator.hasNext()) { fileList << dirIterator.next();<|fim▁hole|> } if (includeChants) { QDirIterator dirIterator(basedir+"/chants/", listFilter ,QDir::Files | QDir::NoSymLinks, QDirIterator::Subdirectories); while(dirIterator.hasNext()) { fileList << dirIterator.next(); } } if (includeMelodies) { QDirIterator dirIterator(basedir+"/melodies/", listFilter ,QDir::Files | QDir::NoSymLinks, QDirIterator::Subdirectories); while(dirIterator.hasNext()) { fileList << dirIterator.next(); } } std::random_shuffle(fileList.begin(), fileList.end()); }<|fim▁end|>
}
<|file_name|>version.py<|end_file_name|><|fim▁begin|><|fim▁hole|># not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. VERSION = '2.9.0'<|fim▁end|>
# Copyright 2014, Doug Wiegley, A10 Networks. # # Licensed under the Apache License, Version 2.0 (the "License"); you may
<|file_name|>teams.client.controller.js<|end_file_name|><|fim▁begin|>'use strict'; // Teams controller angular.module('teams').controller('TeamsController', ['$scope', '$stateParams', '$location', 'Authentication', 'Teams', 'Players', '$filter', function($scope, $stateParams, $location, Authentication, Teams, Players, $filter) { $scope.authentication = Authentication; // Create new Team $scope.create = function() { // Create new Team object var team = new Teams ({ name: this.name });<|fim▁hole|> $location.path('teams/' + response._id); // Clear form fields $scope.name = ''; }, function(errorResponse) { $scope.error = errorResponse.data.message; }); }; // Remove existing Team $scope.remove = function(team) { if ( team ) { team.$remove(); for (var i in $scope.teams) { if ($scope.teams [i] === team) { $scope.teams.splice(i, 1); } } } else { $scope.team.$remove(function() { $location.path('teams'); }); } }; // Update existing Team $scope.update = function() { var team = $scope.team; team.$update(function() { $location.path('teams/' + team._id); }, function(errorResponse) { $scope.error = errorResponse.data.message; }); }; // Find a list of Teams $scope.find = function() { $scope.teams = Teams.query(); }; // Find existing Team $scope.findOne = function() { $scope.team = Teams.get({ teamId: $stateParams.teamId }); $scope.players = Players.query({ 'team': $stateParams.teamId }); }; } ]);<|fim▁end|>
// Redirect after save team.$save(function(response) {
<|file_name|>PermitSearchResultsAddresses.js<|end_file_name|><|fim▁begin|>import React from 'react';<|fim▁hole|> if (props.data === null) { return <p>No results found</p>; } if (!props.data.length) { return <p>No results found</p>; } const addressList = props.data.map((result, index) => { const addressButtonStyle = (parseInt(result.civic_address_id) === parseInt(props.showPermitsForID)) ? 'active' : ''; return ( <li key={index}> <button type="button" title={`View permits for ${result.address}`} data-address={result.civic_address_id} className={`list-group-item list-group-item-action ${addressButtonStyle}`} onClick={props.handleAddressSelection} > {result.address}, {result.zipcode} (ID: {result.civic_address_id}) </button> {parseInt(result.civic_address_id) === parseInt(props.showPermitsForID) && <PermitSearchResultsByAddress key={index} civicAddressID={result.civic_address_id} /> } </li> ); }) return addressList; }; export default PermitSearchResultsAddress;<|fim▁end|>
import PermitSearchResultsByAddress from './PermitSearchResultsByAddress' function PermitSearchResultsAddress(props) {
<|file_name|>SiteUtils.java<|end_file_name|><|fim▁begin|>package com.example.stackexchange.util; import java.nio.file.Path; import java.nio.file.Paths; public class SiteUtils { private static String siteName; public static String getSiteName(String dir) { if (siteName == null) { Path path = Paths.get(dir); Path parent = path.getParent(); Path name = parent.getName(parent.getNameCount() - 1); siteName = name.toString(); } return siteName;<|fim▁hole|> } }<|fim▁end|>
} public static void setSiteName(String siteName) { SiteUtils.siteName = siteName;
<|file_name|>VasyaAndPetyasGame.cpp<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h> #define EL cerr << endl; #define DB(x) cerr << "#" << (#x) << ": " << (x) << " "; #define DEB(x) cerr << "#" << (#x) << ": " << (x) << endl; #define PR(x) cout << (x) << endl #define X first #define Y second #define PB push_back #define MP make_pair<|fim▁hole|>using namespace std; typedef unsigned long long ull; typedef long double ld; typedef long long ll; typedef pair<int, int> ii; typedef pair<int, ii> iii; typedef vector<int> vi; typedef vector<ii> vii; typedef vector<vi> vvi; typedef vector<ll> vll; typedef pair<string, string> ss; const static int MAXN = 50400; int main() { //ios_base::sync_with_stdio(0); cin.tie(0); bool P[MAXN]; int i, n, j; set<int> PP; memset(P, true, sizeof P); cin >> n; for(i = 2; i < n*2; i++) for(j = 2; j < i; j++) if(i%j == 0) P[i] = false; for(i = 2; i < n*2; i++) if(P[i]){ int r = i; for(j = 0; j < 400; j++){ r *= i; PP.insert(r); } } vi R; for(i = 2; i <= n; i++) if(PP.count(i) || P[i]) R.push_back(i); cout << R.size() << endl; for(auto x : R) cout << x << " "; cout << endl; }<|fim▁end|>
<|file_name|>0017_comment.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('website', '0016_auto_20151128_2006'), ] operations = [ migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('content', models.TextField()), ('service_comment_id', models.IntegerField()), ('username', models.CharField(max_length=255)), ('created', models.DateTimeField()), ('updated', models.DateTimeField()), ('issue', models.ForeignKey(to='website.Issue')),<|fim▁hole|><|fim▁end|>
], ), ]
<|file_name|>norace.go<|end_file_name|><|fim▁begin|><|fim▁hole|> //go:build !race // +build !race package testutil const RaceEnabled = false<|fim▁end|>
// Copyright 2018 syzkaller project authors. All rights reserved. // Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
<|file_name|>test.array.js<|end_file_name|><|fim▁begin|>/* global describe, it, require */<|fim▁hole|>// MODULES // var // Expectation library: chai = require( 'chai' ), // Deep close to: deepCloseTo = require( './utils/deepcloseto.js' ), // Module to be tested: log10 = require( './../lib/array.js' ); // VARIABLES // var expect = chai.expect, assert = chai.assert; // TESTS // describe( 'array log10', function tests() { it( 'should export a function', function test() { expect( log10 ).to.be.a( 'function' ); }); it( 'should compute the base-10 logarithm', function test() { var data, actual, expected; data = [ Math.pow( 10, 4 ), Math.pow( 10, 6 ), Math.pow( 10, 9 ), Math.pow( 10, 15 ), Math.pow( 10, 10 ), Math.pow( 10, 25 ) ]; actual = new Array( data.length ); actual = log10( actual, data ); expected = [ 4, 6, 9, 15, 10, 25 ]; assert.isTrue( deepCloseTo( actual, expected, 1e-7 ) ); }); it( 'should return an empty array if provided an empty array', function test() { assert.deepEqual( log10( [], [] ), [] ); }); it( 'should handle non-numeric values by setting the element to NaN', function test() { var data, actual, expected; data = [ true, null, [], {} ]; actual = new Array( data.length ); actual = log10( actual, data ); expected = [ NaN, NaN, NaN, NaN ]; assert.deepEqual( actual, expected ); }); });<|fim▁end|>
'use strict';
<|file_name|>DocumentationNestedScrollLayout.tsx<|end_file_name|><|fim▁begin|>// NOTE(jim): // GETTING NESTED SCROLL RIGHT IS DELICATE BUSINESS. THEREFORE THIS COMPONENT // IS THE ONLY PLACE WHERE SCROLL CODE SHOULD BE HANDLED. THANKS. import { Global, css } from '@emotion/core'; import { theme } from '@expo/styleguide'; import * as React from 'react'; import * as Constants from '~/constants/theme'; const STYLES_GLOBAL = css` html { background: ${theme.background.default}; } @media screen and (max-width: ${Constants.breakpoints.mobile}) { html { /* width */ ::-webkit-scrollbar { width: 6px; } /* Track */ ::-webkit-scrollbar-track { background: ${theme.background.default}; } /* Handle */ ::-webkit-scrollbar-thumb { background: ${theme.background.tertiary}; border-radius: 10px; } /* Handle on hover */ ::-webkit-scrollbar-thumb:hover { background: ${theme.background.quaternary}; } } } `; const STYLES_CONTAINER = css` width: 100%; height: 100vh; overflow: hidden; margin: 0 auto 0 auto; border-right: 1px solid ${theme.border.default}; background: ${theme.background.default}; display: flex; align-items: center; justify-content: space-between; flex-direction: column; @media screen and (max-width: 1440px) { border-left: 0px; border-right: 0px; } @media screen and (max-width: ${Constants.breakpoints.mobile}) { display: block; height: auto; } `; const STYLES_HEADER = css` flex-shrink: 0; width: 100%; @media screen and (min-width: ${Constants.breakpoints.mobile}) { border-bottom: 1px solid ${theme.border.default}; } @media screen and (max-width: ${Constants.breakpoints.mobile}) { position: sticky; top: -57px; z-index: 3; } `; const SHOW_SEARCH_AND_MENU = css` @media screen and (max-width: ${Constants.breakpoints.mobile}) { top: 0px; } `; const STYLES_CONTENT = css` display: flex; align-items: flex-start; margin: 0 auto; justify-content: space-between; width: 100%; height: 100%; min-height: 25%; @media screen and (max-width: ${Constants.breakpoints.mobile}) { height: auto; } `; const STYLES_SIDEBAR = css` flex-shrink: 0; max-width: 280px; height: 100%; overflow: hidden; transition: 200ms ease max-width; background: ${theme.background.canvas}; @media screen and (max-width: 1200px) { max-width: 280px; } @media screen and (max-width: ${Constants.breakpoints.mobile}) { display: none; } `; const STYLES_LEFT = css` border-right: 1px solid ${theme.border.default}; `; const STYLES_RIGHT = css` border-left: 1px solid ${theme.border.default}; background-color: ${theme.background.default}; `; const STYLES_CENTER = css` background: ${theme.background.default}; min-width: 5%; width: 100%; height: 100%; overflow: hidden; display: flex; @media screen and (max-width: ${Constants.breakpoints.mobile}) { height: auto;<|fim▁hole|>`; // NOTE(jim): // All the other components tame the UI. this one allows a container to scroll. const STYLES_SCROLL_CONTAINER = css` height: 100%; width: 100%; padding-bottom: 36px; overflow-y: scroll; overflow-x: hidden; -webkit-overflow-scrolling: touch; /* width */ ::-webkit-scrollbar { width: 6px; } /* Track */ ::-webkit-scrollbar-track { background: transparent; cursor: pointer; } /* Handle */ ::-webkit-scrollbar-thumb { background: ${theme.background.tertiary}; border-radius: 10px; } /* Handle on hover */ ::-webkit-scrollbar-thumb:hover { background: ${theme.background.quaternary}; } @media screen and (max-width: ${Constants.breakpoints.mobile}) { overflow-y: auto; } `; const STYLES_CENTER_WRAPPER = css` max-width: 1200px; margin: auto; `; type ScrollContainerProps = { scrollPosition?: number; scrollHandler?: () => void; }; class ScrollContainer extends React.Component<ScrollContainerProps> { scrollRef = React.createRef<HTMLDivElement>(); componentDidMount() { if (this.props.scrollPosition && this.scrollRef.current) { this.scrollRef.current.scrollTop = this.props.scrollPosition; } } public getScrollTop = () => { return this.scrollRef.current?.scrollTop ?? 0; }; public getScrollRef = () => { return this.scrollRef; }; render() { return ( <div css={STYLES_SCROLL_CONTAINER} ref={this.scrollRef} onScroll={this.props.scrollHandler}> {this.props.children} </div> ); } } type Props = { onContentScroll?: (scrollTop: number) => void; isMenuActive: boolean; tocVisible: boolean; isMobileSearchActive: boolean; header: React.ReactNode; sidebarScrollPosition: number; sidebar: React.ReactNode; sidebarRight: React.ReactElement; }; export default class DocumentationNestedScrollLayout extends React.Component<Props> { static defaultProps = { sidebarScrollPosition: 0, }; sidebarRef = React.createRef<ScrollContainer>(); contentRef = React.createRef<ScrollContainer>(); sidebarRightRef = React.createRef<ScrollContainer>(); public getSidebarScrollTop = () => { return this.sidebarRef.current?.getScrollTop() ?? 0; }; public getContentScrollTop = () => { return this.contentRef.current?.getScrollTop() ?? 0; }; render() { const { isMobileSearchActive, isMenuActive, sidebarScrollPosition } = this.props; if (isMenuActive) { window.scrollTo(0, 0); } return ( <div css={STYLES_CONTAINER}> <Global styles={STYLES_GLOBAL} /> <div css={[STYLES_HEADER, (isMobileSearchActive || isMenuActive) && SHOW_SEARCH_AND_MENU]}> {this.props.header} </div> <div css={STYLES_CONTENT}> <div css={[STYLES_SIDEBAR, STYLES_LEFT]}> <ScrollContainer ref={this.sidebarRef} scrollPosition={sidebarScrollPosition}> {this.props.sidebar} </ScrollContainer> </div> <div css={STYLES_CENTER}> <ScrollContainer ref={this.contentRef} scrollHandler={this.scrollHandler}> <div css={STYLES_CENTER_WRAPPER}>{this.props.children}</div> </ScrollContainer> </div> {this.props.tocVisible && ( <div css={[STYLES_SIDEBAR, STYLES_RIGHT]}> <ScrollContainer ref={this.sidebarRightRef}> {React.cloneElement(this.props.sidebarRight, { selfRef: this.sidebarRightRef, contentRef: this.contentRef, })} </ScrollContainer> </div> )} </div> </div> ); } private scrollHandler = () => { this.props.onContentScroll && this.props.onContentScroll(this.getContentScrollTop()); }; }<|fim▁end|>
overflow: auto; }
<|file_name|>no-unexpected-multiline.js<|end_file_name|><|fim▁begin|>/** * @fileoverview Rule to spot scenarios where a newline looks like it is ending a statement, but is not. * @author Glen Mailer * @copyright 2015 Glen Mailer<|fim▁hole|>//------------------------------------------------------------------------------ // Rule Definition //------------------------------------------------------------------------------ module.exports = function(context) { var FUNCTION_MESSAGE = "Unexpected newline between function and ( of function call."; var PROPERTY_MESSAGE = "Unexpected newline between object and [ of property access."; /** * Check to see if the bracket prior to the node is continuing the previous * line's expression * @param {ASTNode} node The node to check. * @param {string} msg The error message to use. * @returns {void} * @private */ function checkForBreakBefore(node, msg) { var tokens = context.getTokensBefore(node, 2); var paren = tokens[1]; var before = tokens[0]; if (paren.loc.start.line !== before.loc.end.line) { context.report(node, paren.loc.start, msg, { char: paren.value }); } } //-------------------------------------------------------------------------- // Public API //-------------------------------------------------------------------------- return { "MemberExpression": function(node) { if (!node.computed) { return; } checkForBreakBefore(node.property, PROPERTY_MESSAGE); }, "CallExpression": function(node) { if (node.arguments.length === 0) { return; } checkForBreakBefore(node.arguments[0], FUNCTION_MESSAGE); } }; }; module.exports.schema = [];<|fim▁end|>
*/ "use strict";
<|file_name|>layout.module.js<|end_file_name|><|fim▁begin|><|fim▁hole|> .module('brew_journal.layout', [ 'brew_journal.layout.controllers' ]); angular .module('brew_journal.layout.controllers', []); })();<|fim▁end|>
(function () { 'use strict'; angular
<|file_name|>gdrive.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the Google Drive database plugin.""" from __future__ import unicode_literals import unittest from plaso.formatters import gdrive as _ # pylint: disable=unused-import from plaso.lib import definitions from plaso.parsers.sqlite_plugins import gdrive from tests.parsers.sqlite_plugins import test_lib class GoogleDrivePluginTest(test_lib.SQLitePluginTestCase): """Tests for the Google Drive database plugin.""" def testProcess(self): """Tests the Process function on a Google Drive database file.""" plugin = gdrive.GoogleDrivePlugin() storage_writer = self._ParseDatabaseFileWithPlugin(['snapshot.db'], plugin) self.assertEqual(storage_writer.number_of_warnings, 0) self.assertEqual(storage_writer.number_of_events, 30) # Let's verify that we've got the correct balance of cloud and local # entry events. # 10 files mounting to: # 20 Cloud Entries (two timestamps per entry). # 10 Local Entries (one timestamp per entry). local_entries = [] cloud_entries = [] for event in storage_writer.GetEvents(): event_data = self._GetEventDataOfEvent(storage_writer, event) if event_data.data_type == 'gdrive:snapshot:local_entry': local_entries.append(event) else: cloud_entries.append(event) self.assertEqual(len(local_entries), 10) self.assertEqual(len(cloud_entries), 20) # Test one local and one cloud entry. event = local_entries[5] self.CheckTimestamp(event.timestamp, '2014-01-28 00:11:25.000000') event_data = self._GetEventDataOfEvent(storage_writer, event) file_path = ( '%local_sync_root%/Top Secret/Enn meiri ' 'leyndarmál/Sýnileiki - Örverpi.gdoc') self.assertEqual(event_data.path, file_path) expected_message = 'File Path: {0:s} Size: 184'.format(file_path) self._TestGetMessageStrings( event_data, expected_message, file_path) event = cloud_entries[16] self.CheckTimestamp(event.timestamp, '2014-01-28 00:12:27.000000') self.assertEqual( event.timestamp_desc, definitions.TIME_DESCRIPTION_MODIFICATION) event_data = self._GetEventDataOfEvent(storage_writer, event) self.assertEqual(event_data.document_type, 6) expected_url = ( 'https://docs.google.com/document/d/' '1ypXwXhQWliiMSQN9S5M0K6Wh39XF4Uz4GmY-njMf-Z0/edit?usp=docslist_api') self.assertEqual(event_data.url, expected_url) expected_message = ( 'File Path: /Almenningur/Saklausa hliðin ' '[Private] ' 'Size: 0 ' 'URL: {0:s} ' 'Type: DOCUMENT').format(expected_url) expected_short_message = '/Almenningur/Saklausa hliðin' self._TestGetMessageStrings( event_data, expected_message, expected_short_message) if __name__ == '__main__':<|fim▁hole|><|fim▁end|>
unittest.main()
<|file_name|>mir_codegen_switch.rs<|end_file_name|><|fim▁begin|>// run-pass enum Abc { A(u8), B(i8), C, D, }<|fim▁hole|> Abc::D => 4, Abc::B(_) => 2, Abc::A(_) => 1, } } fn foo2(x: Abc) -> bool { match x { Abc::D => true, _ => false } } fn main() { assert_eq!(1, foo(Abc::A(42))); assert_eq!(2, foo(Abc::B(-100))); assert_eq!(3, foo(Abc::C)); assert_eq!(4, foo(Abc::D)); assert_eq!(false, foo2(Abc::A(1))); assert_eq!(false, foo2(Abc::B(2))); assert_eq!(false, foo2(Abc::C)); assert_eq!(true, foo2(Abc::D)); }<|fim▁end|>
fn foo(x: Abc) -> i32 { match x { Abc::C => 3,
<|file_name|>reforms.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """Reforms controller""" import collections from .. import contexts, conv, model, wsgihelpers @wsgihelpers.wsgify def api1_reforms(req): ctx = contexts.Ctx(req) headers = wsgihelpers.handle_cross_origin_resource_sharing(ctx) assert req.method == 'GET', req.method params = req.GET inputs = dict( context = params.get('context'), ) data, errors = conv.pipe( conv.struct( dict( context = conv.noop, # For asynchronous calls ), default = 'drop', ), )(inputs, state = ctx) if errors is not None: return wsgihelpers.respond_json(ctx, collections.OrderedDict(sorted(dict( apiVersion = 1, context = inputs.get('context'), error = collections.OrderedDict(sorted(dict( code = 400, # Bad Request errors = [conv.jsonify_value(errors)], message = ctx._(u'Bad parameters in request'), ).iteritems())), method = req.script_name, params = inputs, url = req.url.decode('utf-8'), ).iteritems())), headers = headers, ) build_reform_function_by_key = model.build_reform_function_by_key declared_reforms_key = build_reform_function_by_key.keys() \ if build_reform_function_by_key is not None \ else None reforms = collections.OrderedDict(sorted({ reform_key: reform.name for reform_key, reform in model.reform_by_full_key.iteritems() }.iteritems())) if declared_reforms_key is not None else None return wsgihelpers.respond_json(ctx, collections.OrderedDict(sorted(dict( apiVersion = 1, context = data['context'],<|fim▁hole|> url = req.url.decode('utf-8'), ).iteritems())), headers = headers, )<|fim▁end|>
method = req.script_name, params = inputs, reforms = reforms,
<|file_name|>clear.js<|end_file_name|><|fim▁begin|>'use strict'; const test = require('ava'); const hashSet = require('../index'); const MySet = hashSet(x => x); test('should not change empty set', t => { const set = new MySet(); set.clear(); t.is(set.size, 0); }); <|fim▁hole|> set.clear(); t.is(set.size, 0); });<|fim▁end|>
test('should clear set', t => { const set = new MySet(); set.add(1);
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|>// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator 0.14.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. /** * This package contains the classes for AutoRestParameterGroupingTestService.<|fim▁hole|> */ package fixtures.azureparametergrouping;<|fim▁end|>
* Test Infrastructure for AutoRest.
<|file_name|>argumentToUnnamedParameter.py<|end_file_name|><|fim▁begin|>from typing import NewType SomeType = NewType("SomeType", bytes)<|fim▁hole|><|fim▁end|>
SomeType(b"va<caret>lue")
<|file_name|>test_repositories.py<|end_file_name|><|fim▁begin|>import pytest from cfme.infrastructure import repositories from utils.update import update from utils.wait import TimedOutError, wait_for @pytest.mark.tier(2) @pytest.mark.meta(blockers=[1188427]) def test_repository_crud(soft_assert, random_string, request): repo_name = 'Test Repo {}'.format(random_string) repo = repositories.Repository(repo_name, '//testhost/share/path') request.addfinalizer(repo.delete) # create repo.create() # read assert repo.exists # update with update(repo): repo.name = 'Updated {}'.format(repo_name) with soft_assert.catch_assert(): assert repo.exists, 'Repository rename failed' # Only change the name back if renaming succeeded with update(repo):<|fim▁hole|> repo.delete() try: wait_for(lambda: not repo.exists) except TimedOutError: raise AssertionError('failed to delete repository')<|fim▁end|>
repo.name = repo_name # delete
<|file_name|>demand.py<|end_file_name|><|fim▁begin|># coding: utf-8 from sqlalchemy import Column, Float, Integer, Numeric, String, Table, Text from geoalchemy2.types import Geometry from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() metadata = Base.metadata class EgoDemandFederalstate(Base): __tablename__ = 'ego_demand_federalstate' __table_args__ = {'schema': 'demand'} eu_code = Column(String(7), primary_key=True) federal_states = Column(String) elec_consumption_households = Column(Float(53)) elec_consumption_industry = Column(Float(53)) elec_consumption_tertiary_sector = Column(Float(53)) population = Column(Integer) elec_consumption_households_per_person = Column(Float(53)) class EgoDpLoadarea(Base): __tablename__ = 'ego_dp_loadarea' __table_args__ = {'schema': 'demand'} version = Column(Text, primary_key=True, nullable=False) id = Column(Integer, primary_key=True, nullable=False) subst_id = Column(Integer) area_ha = Column(Float(53)) nuts = Column(String(5)) rs_0 = Column(String(12)) ags_0 = Column(String(12)) otg_id = Column(Integer) un_id = Column(Integer) zensus_sum = Column(Integer) zensus_count = Column(Integer) zensus_density = Column(Float(53)) ioer_sum = Column(Float(53)) ioer_count = Column(Integer) ioer_density = Column(Float(53)) sector_area_residential = Column(Float(53)) sector_area_retail = Column(Float(53)) sector_area_industrial = Column(Float(53)) sector_area_agricultural = Column(Float(53)) sector_area_sum = Column(Float(53)) sector_share_residential = Column(Float(53)) sector_share_retail = Column(Float(53)) sector_share_industrial = Column(Float(53)) sector_share_agricultural = Column(Float(53)) sector_share_sum = Column(Float(53)) sector_count_residential = Column(Integer) sector_count_retail = Column(Integer) sector_count_industrial = Column(Integer) sector_count_agricultural = Column(Integer) sector_count_sum = Column(Integer) sector_consumption_residential = Column(Float(53)) sector_consumption_retail = Column(Float(53)) sector_consumption_industrial = Column(Float(53)) sector_consumption_agricultural = Column(Float(53)) sector_consumption_sum = Column(Float(53)) sector_peakload_retail = Column(Float(53)) sector_peakload_residential = Column(Float(53)) sector_peakload_industrial = Column(Float(53)) sector_peakload_agricultural = Column(Float(53)) geom_centroid = Column(Geometry('POINT', 3035))<|fim▁hole|> t_ego_dp_loadarea_v0_4_3_mview = Table( 'ego_dp_loadarea_v0_4_3_mview', metadata, Column('version', Text), Column('id', Integer, unique=True), Column('subst_id', Integer), Column('area_ha', Numeric), Column('nuts', String(5)), Column('rs_0', String(12)), Column('ags_0', String(12)), Column('otg_id', Integer), Column('un_id', Integer), Column('zensus_sum', Integer), Column('zensus_count', Integer), Column('zensus_density', Numeric), Column('ioer_sum', Numeric), Column('ioer_count', Integer), Column('ioer_density', Numeric), Column('sector_area_residential', Numeric), Column('sector_area_retail', Numeric), Column('sector_area_industrial', Numeric), Column('sector_area_agricultural', Numeric), Column('sector_area_sum', Numeric), Column('sector_share_residential', Numeric), Column('sector_share_retail', Numeric), Column('sector_share_industrial', Numeric), Column('sector_share_agricultural', Numeric), Column('sector_share_sum', Numeric), Column('sector_count_residential', Integer), Column('sector_count_retail', Integer), Column('sector_count_industrial', Integer), Column('sector_count_agricultural', Integer), Column('sector_count_sum', Integer), Column('sector_consumption_residential', Float(53)), Column('sector_consumption_retail', Float(53)), Column('sector_consumption_industrial', Float(53)), Column('sector_consumption_agricultural', Float(53)), Column('sector_consumption_sum', Float(53)), Column('sector_peakload_retail', Float(53)), Column('sector_peakload_residential', Float(53)), Column('sector_peakload_industrial', Float(53)), Column('sector_peakload_agricultural', Float(53)), Column('geom_centroid', Geometry('POINT', 3035)), Column('geom_surfacepoint', Geometry('POINT', 3035)), Column('geom_centre', Geometry('POINT', 3035)), Column('geom', Geometry('POLYGON', 3035), index=True), schema='demand' ) t_ego_dp_loadarea_v0_4_5_mview = Table( 'ego_dp_loadarea_v0_4_5_mview', metadata, Column('version', Text), Column('id', Integer, unique=True), Column('subst_id', Integer), Column('area_ha', Numeric), Column('nuts', String(5)), Column('rs_0', String(12)), Column('ags_0', String(12)), Column('otg_id', Integer), Column('un_id', Integer), Column('zensus_sum', Integer), Column('zensus_count', Integer), Column('zensus_density', Numeric), Column('ioer_sum', Numeric), Column('ioer_count', Integer), Column('ioer_density', Numeric), Column('sector_area_residential', Numeric), Column('sector_area_retail', Numeric), Column('sector_area_industrial', Numeric), Column('sector_area_agricultural', Numeric), Column('sector_area_sum', Numeric), Column('sector_share_residential', Numeric), Column('sector_share_retail', Numeric), Column('sector_share_industrial', Numeric), Column('sector_share_agricultural', Numeric), Column('sector_share_sum', Numeric), Column('sector_count_residential', Integer), Column('sector_count_retail', Integer), Column('sector_count_industrial', Integer), Column('sector_count_agricultural', Integer), Column('sector_count_sum', Integer), Column('sector_consumption_residential', Float(53)), Column('sector_consumption_retail', Float(53)), Column('sector_consumption_industrial', Float(53)), Column('sector_consumption_agricultural', Float(53)), Column('sector_consumption_sum', Float(53)), Column('sector_peakload_retail', Float(53)), Column('sector_peakload_residential', Float(53)), Column('sector_peakload_industrial', Float(53)), Column('sector_peakload_agricultural', Float(53)), Column('geom_centroid', Geometry('POINT', 3035)), Column('geom_surfacepoint', Geometry('POINT', 3035)), Column('geom_centre', Geometry('POINT', 3035)), Column('geom', Geometry('POLYGON', 3035), index=True), schema='demand' )<|fim▁end|>
geom_surfacepoint = Column(Geometry('POINT', 3035)) geom_centre = Column(Geometry('POINT', 3035)) geom = Column(Geometry('POLYGON', 3035), index=True)
<|file_name|>webgluniformlocation.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ <|fim▁hole|>use dom::bindings::global::GlobalRef; use dom::bindings::js::Root; use dom::bindings::reflector::{Reflector, reflect_dom_object}; #[dom_struct] pub struct WebGLUniformLocation { reflector_: Reflector, id: i32, program_id: u32, } impl WebGLUniformLocation { fn new_inherited(id: i32, program_id: u32) -> WebGLUniformLocation { WebGLUniformLocation { reflector_: Reflector::new(), id: id, program_id: program_id, } } pub fn new(global: GlobalRef, id: i32, program_id: u32) -> Root<WebGLUniformLocation> { reflect_dom_object( box WebGLUniformLocation::new_inherited(id, program_id), global, WebGLUniformLocationBinding::Wrap) } } impl WebGLUniformLocation { pub fn id(&self) -> i32 { self.id } pub fn program_id(&self) -> u32 { self.program_id } }<|fim▁end|>
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl use dom::bindings::codegen::Bindings::WebGLUniformLocationBinding;
<|file_name|>throwIfNonUnexpectedError.js<|end_file_name|><|fim▁begin|>module.exports = function throwIfNonUnexpectedError(err) { if (err && err.message === 'aggregate error') {<|fim▁hole|> throw err; } };<|fim▁end|>
for (var i = 0 ; i < err.length ; i += 1) { throwIfNonUnexpectedError(err[i]); } } else if (!err || !err._isUnexpected) {
<|file_name|>errors.go<|end_file_name|><|fim▁begin|>package iternada <|fim▁hole|>import ( "errors" ) var ( errNilReceiver = errors.New("Nil Receiver") )<|fim▁end|>
<|file_name|>ex_synchronize.py<|end_file_name|><|fim▁begin|># # A test file for the `processing` package # import time, sys, random from Queue import Empty import processing # may get overwritten #### TEST_VALUE def value_func(running, mutex): random.seed() time.sleep(random.random()*4) mutex.acquire() print '\n\t\t\t' + str(processing.currentProcess()) + ' has finished' running.value -= 1 mutex.release() def test_value(): TASKS = 10 running = processing.Value('i', TASKS) mutex = processing.Lock() for i in range(TASKS): processing.Process(target=value_func, args=(running, mutex)).start() while running.value > 0: time.sleep(0.08) mutex.acquire() print running.value, sys.stdout.flush() mutex.release() print print 'No more running processes' #### TEST_QUEUE def queue_func(queue): for i in range(30): time.sleep(0.5 * random.random()) queue.put(i*i) queue.put('STOP') def test_queue(): q = processing.Queue() p = processing.Process(target=queue_func, args=(q,)) p.start() o = None while o != 'STOP': try: o = q.get(timeout=0.3) print o, sys.stdout.flush() <|fim▁hole|> print 'TIMEOUT' print #### TEST_CONDITION def condition_func(cond): cond.acquire() print '\t' + str(cond) time.sleep(2) print '\tchild is notifying' print '\t' + str(cond) cond.notify() cond.release() def test_condition(): cond = processing.Condition() p = processing.Process(target=condition_func, args=(cond,)) print cond cond.acquire() print cond cond.acquire() print cond p.start() print 'main is waiting' cond.wait() print 'main has woken up' print cond cond.release() print cond cond.release() p.join() print cond #### TEST_SEMAPHORE def semaphore_func(sema, mutex, running): sema.acquire() mutex.acquire() running.value += 1 print running.value, 'tasks are running' mutex.release() random.seed() time.sleep(random.random()*2) mutex.acquire() running.value -= 1 print '%s has finished' % processing.currentProcess() mutex.release() sema.release() def test_semaphore(): sema = processing.Semaphore(3) mutex = processing.RLock() running = processing.Value('i', 0) processes = [ processing.Process(target=semaphore_func, args=(sema, mutex, running)) for i in range(10) ] for p in processes: p.start() for p in processes: p.join() #### TEST_JOIN_TIMEOUT def join_timeout_func(): print '\tchild sleeping' time.sleep(5.5) print '\n\tchild terminating' def test_join_timeout(): p = processing.Process(target=join_timeout_func) p.start() print 'waiting for process to finish' while 1: p.join(timeout=1) if not p.isAlive(): break print '.', sys.stdout.flush() #### TEST_EVENT def event_func(event): print '\t%r is waiting' % processing.currentProcess() event.wait() print '\t%r has woken up' % processing.currentProcess() def test_event(): event = processing.Event() processes = [processing.Process(target=event_func, args=(event,)) for i in range(5)] for p in processes: p.start() print 'main is sleeping' time.sleep(2) print 'main is setting event' event.set() for p in processes: p.join() #### TEST_SHAREDVALUES def sharedvalues_func(values, arrays, shared_values, shared_arrays): for i in range(len(values)): v = values[i][1] sv = shared_values[i].value assert v == sv for i in range(len(values)): a = arrays[i][1] sa = list(shared_arrays[i][:]) assert a == sa print 'Tests passed' def test_sharedvalues(): values = [ ('i', 10), ('h', -2), ('d', 1.25) ] arrays = [ ('i', range(100)), ('d', [0.25 * i for i in range(100)]), ('H', range(1000)) ] shared_values = [processing.Value(id, v) for id, v in values] shared_arrays = [processing.Array(id, a) for id, a in arrays] p = processing.Process( target=sharedvalues_func, args=(values, arrays, shared_values, shared_arrays) ) p.start() p.join() assert p.getExitCode() == 0 #### def test(namespace=processing): global processing processing = namespace for func in [ test_value, test_queue, test_condition, test_semaphore, test_join_timeout, test_event, test_sharedvalues ]: print '\n\t######## %s\n' % func.__name__ func() ignore = processing.activeChildren() # cleanup any old processes if hasattr(processing, '_debugInfo'): info = processing._debugInfo() if info: print info raise ValueError, 'there should be no positive refcounts left' if __name__ == '__main__': processing.freezeSupport() assert len(sys.argv) in (1, 2) if len(sys.argv) == 1 or sys.argv[1] == 'processes': print ' Using processes '.center(79, '-') namespace = processing elif sys.argv[1] == 'manager': print ' Using processes and a manager '.center(79, '-') namespace = processing.Manager() namespace.Process = processing.Process namespace.currentProcess = processing.currentProcess namespace.activeChildren = processing.activeChildren elif sys.argv[1] == 'threads': print ' Using threads '.center(79, '-') import processing.dummy as namespace else: print 'Usage:\n\t%s [processes | manager | threads]' % sys.argv[0] raise SystemExit, 2 test(namespace)<|fim▁end|>
except Empty:
<|file_name|>util.js<|end_file_name|><|fim▁begin|>'use strict'; // 頑シミュさんの装飾品検索の結果と比較しやすくする function simplifyDecombs(decombs) { return decombs.map(decomb => { let torsoUp = Object.keys(decomb).map(part => decomb[part]).some(comb => { if (comb == null) return false; return comb.skills['胴系統倍加'] ? true : false; }); let names = []; Object.keys(decomb).forEach(part => { let comb = decomb[part];<|fim▁hole|> return names.sort().join(','); }); } exports.simplifyDecombs = simplifyDecombs;<|fim▁end|>
let decos = comb ? comb.decos : []; if (torsoUp && part === 'body') decos = decos.map(deco => deco += '(胴)'); names = names.concat(decos); });
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup setup( name = "smbios_validation_tool", author = "Xu Han", author_email = "", license = "Apache", url = "https://github.com/google/smbios-validation-tool",<|fim▁hole|>)<|fim▁end|>
packages=['smbios_validation_tool', 'dmiparse'], scripts=['smbios_validation'],
<|file_name|>osutil.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function import collections import logging import os import platform import re import subprocess import types import util import json from ebstall.versions import Version from ebstall.util import normalize_string logger = logging.getLogger(__name__) CLI_DEFAULTS_DEFAULT = dict( packager='source' ) CLI_DEFAULTS_DEBIAN = dict( packager='apt-get' ) CLI_DEFAULTS_CENTOS = dict( packager='yum' ) CLI_DEFAULTS_DARWIN = dict( packager='source' ) FLAVORS = { 'debian': 'debian', 'ubuntu': 'debian', 'kubuntu': 'debian', 'kali': 'debian', 'centos': 'redhat', 'centos linux': 'redhat', 'fedora': 'redhat', 'red hat enterprise linux server': 'redhat', 'rhel': 'redhat', 'amazon': 'redhat', 'amzn': 'redhat', 'gentoo': 'gentoo', 'gentoo base system': 'gentoo', 'darwin': 'darwin', 'opensuse': 'suse', 'suse': 'suse', } CLI_DEFAULTS = { "default": CLI_DEFAULTS_DEFAULT, "debian": CLI_DEFAULTS_DEBIAN, "ubuntu": CLI_DEFAULTS_DEBIAN, "centos": CLI_DEFAULTS_CENTOS, "centos linux": CLI_DEFAULTS_CENTOS, "fedora": CLI_DEFAULTS_CENTOS, "red hat enterprise linux server": CLI_DEFAULTS_CENTOS, "rhel": CLI_DEFAULTS_CENTOS, "amazon": CLI_DEFAULTS_CENTOS, "amzn": CLI_DEFAULTS_CENTOS, "gentoo": CLI_DEFAULTS_DEFAULT, "gentoo base system": CLI_DEFAULTS_DEFAULT, "darwin": CLI_DEFAULTS_DARWIN, "opensuse": CLI_DEFAULTS_DEFAULT, "suse": CLI_DEFAULTS_DEFAULT, } """CLI defaults.""" # Start system START_INITD = 'init.d' START_SYSTEMD = 'systemd' # Pkg manager PKG_YUM = 'yum' PKG_APT = 'apt-get' FAMILY_REDHAT = 'redhat' FAMILY_DEBIAN = 'debian' # redhat / debian YUMS = ['redhat', 'fedora', 'centos', 'rhel', 'amzn', 'amazon'] DEBS = ['debian', 'ubuntu', 'kali'] class OSInfo(object): """OS information, name, version, like - similarity""" def __init__(self, name=None, version=None, version_major=None, like=None, family=None, packager=None, start_system=None, has_os_release=False, fallback_detection=False, long_name=None, *args, **kwargs): self.name = name self.long_name = long_name self.version_major = version_major self.version = version self.like = like self.family = family self.packager = packager self.start_system = start_system self.has_os_release = has_os_release self.fallback_detection = fallback_detection def __str__(self): return 'OSInfo(%r)' % json.dumps(self.to_json()) def __repr__(self): return 'OSInfo(%r)' % json.dumps(self.to_json()) def to_json(self): """ Converts to the JSON :return: """ js = collections.OrderedDict() js['name'] = self.name js['long_name'] = self.long_name js['version_major'] = self.version_major js['version'] = self.version js['like'] = self.like js['family'] = self.family js['packager'] = self.packager js['start_system'] = self.start_system js['has_os_release'] = self.has_os_release js['fallback_detection'] = self.fallback_detection return js class PackageInfo(object): """ Basic information about particular package """ def __init__(self, name, version, arch, repo, size=None, section=None): self._version = None self.name = name self.version = version self.arch = arch self.repo = repo self.size = size self.section = section @property def version(self): return self._version @version.setter def version(self, val): self._version = Version(val) def __str__(self): return '%s-%s.%s' % (self.name, self.version, self.arch) def __repr__(self): return 'PackageInfo(name=%r, version=%r, arch=%r, repo=%r, size=%r, section=%r)' \ % (self.name, self.version, self.arch, self.repo, self.size, self.section) def to_json(self): """ Converts to the JSON :return: """ js = collections.OrderedDict() js['name'] = self.name js['version'] = str(self.version) js['arch'] = self.arch js['repo'] = self.repo if self.size is not None: js['size'] = self.size if self.section is not None: js['section'] = self.section return js @classmethod def from_json(cls, js): """ Converts json dict to the object :param js: :return: """ obj = cls(name=js['name'], version=js['version'], arch=js['arch'], repo=js['repo']) if 'size' in js: obj.size = js['size'] if 'section' in js: obj.section = js['section'] return obj def get_os(): """ Returns basic information about the OS. :return: OSInfo """ # At first - parse os-release ros = OSInfo() os_release_path = '/etc/os-release' if os.path.isfile(os_release_path): ros.name = _get_systemd_os_release_var("ID", filepath=os_release_path) ros.version = _get_systemd_os_release_var("VERSION_ID", filepath=os_release_path) ros.like = _get_systemd_os_release_var("ID_LIKE", os_release_path).split(" ") ros.long_name = _get_systemd_os_release_var("PRETTY_NAME", filepath=os_release_path) ros.has_os_release = True if not ros.long_name: ros.long_name = _get_systemd_os_release_var("NAME", filepath=os_release_path) # Try /etc/redhat-release and /etc/debian_version if not ros.has_os_release or ros.like is None or ros.version is None or ros.name is None: os_redhat_release(ros) os_debian_version(ros) os_issue(ros) # like detection os_like_detect(ros) os_family_detect(ros) # Major version os_major_version(ros) # Packager detection - yum / apt-get os_packager(ros) # Start system - init.d / systemd os_start_system(ros) return ros def os_family_detect(ros): """ OS Family (redhat, debian, ...) :param ros: :return: """ if util.startswith(ros.like, YUMS): ros.family = FAMILY_REDHAT if util.startswith(ros.like, DEBS): ros.family = FAMILY_DEBIAN if ros.family is not None: if sum([1 for x in YUMS if ros.name.lower().startswith(x)]) > 0: ros.family = FAMILY_REDHAT if sum([1 for x in DEBS if ros.name.lower().startswith(x)]) > 0: ros.family = FAMILY_DEBIAN return def os_packager(ros): if ros.like is not None: if util.startswith(ros.like, YUMS): ros.packager = PKG_YUM if util.startswith(ros.like, DEBS): ros.packager = PKG_APT return ros if ros.name is not None: if sum([1 for x in YUMS if ros.name.lower().startswith(x)]) > 0: ros.packager = PKG_YUM if sum([1 for x in DEBS if ros.name.lower().startswith(x)]) > 0: ros.packager = PKG_APT return if os.path.exists('/etc/yum'): ros.packager = PKG_YUM if os.path.exists('/etc/apt/sources.list'): ros.packager = PKG_APT def os_start_system(ros): if os.path.exists('/etc/systemd'): ros.start_system = START_SYSTEMD else: ros.start_system = START_INITD return ros def os_issue(ros): if os.path.exists('/etc/issue'): with open('/etc/issue', 'r') as fh: issue = fh.readline().strip() issue = re.sub(r'\\[a-z]', '', issue).strip() match1 = re.match(r'^(.+?)\s+release\s+(.+?)$', issue, re.IGNORECASE) match2 = re.match(r'^(.+?)\s+([0-9.]+)\s*(LTS)?$', issue, re.IGNORECASE) if match1: ros.long_name = match1.group(1).strip() ros.version = match1.group(2).strip() elif match2: ros.long_name = match2.group(1).strip() ros.version = match2.group(2).strip() else: ros.long_name = issue return ros def os_debian_version(ros): if os.path.exists('/etc/debian_version'): with open('/etc/debian_version', 'r') as fh: debver = fh.readline().strip() ros.like = 'debian' ros.family = FAMILY_DEBIAN if ros.version is None: ros.version = debver.strip() return ros def os_redhat_release(ros): if os.path.exists('/etc/redhat-release'): with open('/etc/redhat-release', 'r') as fh: redhatrel = fh.readline().strip() ros.like = 'redhat' ros.family = FAMILY_REDHAT match = re.match(r'^(.+?)\s+release\s+(.+?)$', redhatrel, re.IGNORECASE) if match is not None: ros.long_name = match.group(1).strip() ros.version = match.group(2).strip() else: ros.long_name = redhatrel return ros def os_like_detect(ros): if not ros.like and ros.name is not None: try: ros.like = FLAVORS[ros.name.lower()] except: pass if not ros.like and ros.long_name is not None: try: ros.like = FLAVORS[ros.long_name.lower()] except: pass return ros def os_major_version(ros): if ros.version is not None: match = re.match(r'(.+?)[/.]', ros.version) if match: ros.version_major = match.group(1) return ros def get_os_info(filepath="/etc/os-release"): """ Get OS name and version :param str filepath: File path of os-release file :returns: (os_name, os_version) :rtype: `tuple` of `str` """ if os.path.isfile(filepath): # Systemd os-release parsing might be viable os_name, os_version = get_systemd_os_info(filepath=filepath) if os_name: return (os_name, os_version) # Fallback to platform module return get_python_os_info() def get_os_info_ua(filepath="/etc/os-release"): """ Get OS name and version string for User Agent :param str filepath: File path of os-release file :returns: os_ua :rtype: `str` """ if os.path.isfile(filepath): os_ua = _get_systemd_os_release_var("PRETTY_NAME", filepath=filepath) if not os_ua: os_ua = _get_systemd_os_release_var("NAME", filepath=filepath) if os_ua: return os_ua # Fallback return " ".join(get_python_os_info()) def get_systemd_os_info(filepath="/etc/os-release"): """ Parse systemd /etc/os-release for distribution information :param str filepath: File path of os-release file :returns: (os_name, os_version) :rtype: `tuple` of `str` """ os_name = _get_systemd_os_release_var("ID", filepath=filepath) os_version = _get_systemd_os_release_var("VERSION_ID", filepath=filepath) return (os_name, os_version) def get_systemd_os_like(filepath="/etc/os-release"): """ Get a list of strings that indicate the distribution likeness to other distributions. :param str filepath: File path of os-release file :returns: List of distribution acronyms :rtype: `list` of `str` """ return _get_systemd_os_release_var("ID_LIKE", filepath).split(" ") def _get_systemd_os_release_var(varname, filepath="/etc/os-release"): """ Get single value from systemd /etc/os-release :param str varname: Name of variable to fetch :param str filepath: File path of os-release file :returns: requested value :rtype: `str` """ var_string = varname+"=" if not os.path.isfile(filepath): return "" with open(filepath, 'r') as fh: contents = fh.readlines() for line in contents: if line.strip().startswith(var_string): # Return the value of var, normalized return normalize_string(line.strip()[len(var_string):]) return "" def get_python_os_info(): """ Get Operating System type/distribution and major version using python platform module :returns: (os_name, os_version) :rtype: `tuple` of `str` """ info = platform.system_alias( platform.system(), platform.release(), platform.version() ) os_type, os_ver, _ = info os_type = os_type.lower() if os_type.startswith('linux'): info = platform.linux_distribution() # On arch, platform.linux_distribution() is reportedly ('','',''), # so handle it defensively if info[0]: os_type = info[0] if info[1]: os_ver = info[1] elif os_type.startswith('darwin'): os_ver = subprocess.Popen( ["sw_vers", "-productVersion"], stdout=subprocess.PIPE ).communicate()[0].rstrip('\n') elif os_type.startswith('freebsd'): # eg "9.3-RC3-p1" os_ver = os_ver.partition("-")[0] os_ver = os_ver.partition(".")[0] elif platform.win32_ver()[1]: os_ver = platform.win32_ver()[1] else: # Cases known to fall here: Cygwin python os_ver = '' return os_type, os_ver def os_like(key): """ Tries to transform OS ID to LIKE_ID :param key: :return: string or None """ try: return FLAVORS[key.lower()] except KeyError: return None def os_constant(key): """ Get a constant value for operating system :param key: name of cli constant :return: value of constant for active os """ os_info = get_os_info() try: constants = CLI_DEFAULTS[os_info[0].lower()] except KeyError: constants = os_like_constants() if not constants: constants = CLI_DEFAULTS["default"] return constants[key] def os_like_constants(): """ Try to get constants for distribution with similar layout and configuration, indicated by /etc/os-release variable "LIKE"<|fim▁hole|> :returns: Constants dictionary :rtype: `dict` """ os_like = get_systemd_os_like() if os_like: for os_name in os_like: if os_name in CLI_DEFAULTS.keys(): return CLI_DEFAULTS[os_name] return {} def get_yum_packages(out): """ List of all packages parsing :param out: :return: """ ret = [] lines = out if isinstance(out, types.ListType) else out.split('\n') for line in lines: line = line.strip() match = re.match(r'^([a-zA-Z0-9.\-_]+)[\s\t]+([a-zA-Z0-9.:\-_]+)[\s\t]+([@a-zA-Z0-9.\-_]+)$', line) if match is None: continue package = match.group(1).strip() version = match.group(2).strip() repo = match.group(3).strip() arch = None # Architecture extract match_arch = re.match(r'^(.+?)\.([^.]+)$', package) if match_arch: package = match_arch.group(1).strip() arch = match_arch.group(2).strip() pkg = PackageInfo(name=package, version=version, arch=arch, repo=repo) ret.append(pkg) return ret def get_yum_packages_update(out): """ List of packages to update parsing :param out: :return: """ ret = [] eqline = 0 cur_section = None lines = out if isinstance(out, types.ListType) else out.split('\n') for line in lines: line = line.strip() if line.startswith('====='): eqline += 1 continue # Process lines only after 2nd ====== line - should be the package list. if eqline != 2: continue lmatch = re.match(r'^([a-zA-Z\s]+):$', line) if lmatch is not None: cur_section = lmatch.group(1) continue match = re.match(r'^([a-zA-Z0-9.\-_]+)[\s\t]+([a-zA-Z0-9.\-_]+)[\s\t]+([a-zA-Z0-9.:\-_]+)' r'[\s\t]+([@a-zA-Z0-9.:\-_]+)[\s\t]+([a-zA-Z0-9.\-_\s]+?)$', line) if match is None: continue package = match.group(1).strip() version = match.group(3).strip() repo = match.group(4).strip() arch = match.group(2).strip() size = match.group(5).strip() pkg = PackageInfo(name=package, version=version, arch=arch, repo=repo, size=size, section=cur_section) ret.append(pkg) return ret def check_package_restrictions(yum_output_packages, allowed_packages): """ Checks list of the yum output pakcages vs. allowed packages :param yum_output_packages: :param check_packages: :return: (conflicting packages, new packages) """ new_packages = [] conflicting_packages = [] for out_package in yum_output_packages: allowed_list = [x for x in allowed_packages if x.name == out_package.name] if len(allowed_list) == 0: new_packages.append(out_package) continue # Sort packages based on the version, highest first. if len(allowed_list) > 1: allowed_list.sort(key=lambda x: x.version, reverse=True) allowed = allowed_list[0] if out_package.version > allowed.version: conflicting_packages.append(out_package) return conflicting_packages, new_packages def package_diff(a, b, only_in_b=False): """ Package diff a - b package x \in a is removed from a if the same package (or higher version) is in b. If there are more packages in b, the one with higher version is taken Used for removing already installed packages (b) from the packages to install (a). :param a: :param b: :param only_in_b: if True the element in a has to be in the b in the lower version. :return: """ res = [] for pkg in a: b_filtered = [x for x in b if x.name == pkg.name and x.arch == pkg.arch] # New package, not in b if len(b_filtered) == 0: if not only_in_b: res.append(pkg) continue # Sort packages based on the version, highest first. if len(b_filtered) > 1: b_filtered.sort(key=lambda x: x.version, reverse=True) # b contains smaller version of the package, add to the result if b_filtered[0].version < pkg.version: res.append(pkg) return res<|fim▁end|>
<|file_name|>err.rs<|end_file_name|><|fim▁begin|>use sys; pub struct Error { priv n:int } pub enum ErrorResult<T> { Ok(T), Err(Error) } impl Error { #[inline(always)] pub fn is_err(&self) -> bool { self.n != 0 } #[inline(always)] pub fn msg(&self) -> &'static str { msg(-1*self.n) } #[inline(always)] pub fn is(&self, code: int) -> bool { (-1*self.n) == code } } impl<T> ErrorResult<T> { #[inline] pub fn unwrap(self) -> T { match self { Ok(d) => d, Err(err) => sys::fail(err.msg()) } } #[inline] pub fn is_error(&self) -> bool { match *self { Ok(_) => false, Err(a) => a.is_err() } } } pub static EPERM : int = 1; pub static ENOENT : int = 2; pub static ESRCH : int = 3; pub static EINTR : int = 4; pub static EIO : int = 5; pub static ENXIO : int = 6; pub static E2BIG : int = 7; pub static ENOEXEC : int = 8; pub static EBADF : int = 9; pub static ECHILD : int = 10; pub static EAGAIN : int = 11; pub static ENOMEM : int = 12; pub static EACCES : int = 13; pub static EFAULT : int = 14; pub static ENOTBLK : int = 15; pub static EBUSY : int = 16; pub static EEXIST : int = 17; pub static EXDEV : int = 18; pub static ENODEV : int = 19; pub static ENOTDIR : int = 20; pub static EISDIR : int = 21; pub static EINVAL : int = 22; pub static ENFILE : int = 23; pub static EMFILE : int = 24; pub static ENOTTY : int = 25; pub static ETXTBSY : int = 26; pub static EFBIG : int = 27; pub static ENOSPC : int = 28; pub static ESPIPE : int = 29; pub static EROFS : int = 30; pub static EMLINK : int = 31; pub static EPIPE : int = 32; pub static EDOM : int = 33; pub static ERANGE : int = 34; pub static EDEADLK : int = 35; pub static ENAMETOOLONG : int = 36; pub static ENOLCK : int = 37; pub static ENOSYS : int = 38; pub static ENOTEMPTY : int = 39; pub static ELOOP : int = 40; pub static EWOULDBLOCK : int = EAGAIN; pub static ENOMSG : int = 42; pub static EIDRM : int = 43; pub static ECHRNG : int = 44; pub static EL2NSYNC : int = 45; pub static EL3HLT : int = 46; pub static EL3RST : int = 47; pub static ELNRNG : int = 48; pub static EUNATCH : int = 49; pub static ENOCSI : int = 50; pub static EL2HLT : int = 51; pub static EBADE : int = 52; pub static EBADR : int = 53; pub static EXFULL : int = 54; pub static ENOANO : int = 55; pub static EBADRQC : int = 56; pub static EBADSLT : int = 57; pub static EDEADLOCK : int = EDEADLK; pub static EBFONT : int = 59; pub static ENOSTR : int = 60; pub static ENODATA : int = 61; pub static ETIME : int = 62; pub static ENOSR : int = 63; pub static ENONET : int = 64; pub static ENOPKG : int = 65; pub static EREMOTE : int = 66; pub static ENOLINK : int = 67; pub static EADV : int = 68; pub static ESRMNT : int = 69; pub static ECOMM : int = 70; pub static EPROTO : int = 71; pub static EMULTIHOP : int = 72; pub static EDOTDOT : int = 73; pub static EBADMSG : int = 74; pub static EOVERFLOW : int = 75; pub static ENOTUNIQ : int = 76; pub static EBADFD : int = 77; pub static EREMCHG : int = 78; pub static ELIBACC : int = 79; pub static ELIBBAD : int = 80; pub static ELIBSCN : int = 81; pub static ELIBMAX : int = 82; pub static ELIBEXEC : int = 83; pub static EILSEQ : int = 84; pub static ERESTART : int = 85; pub static ESTRPIPE : int = 86; pub static EUSERS : int = 87; pub static ENOTSOCK : int = 88; pub static EDESTADDRREQ : int = 89; pub static EMSGSIZE : int = 90; pub static EPROTOTYPE : int = 91; pub static ENOPROTOOPT : int = 92; pub static EPROTONOSUPPORT : int = 93; pub static ESOCKTNOSUPPORT : int = 94; pub static EOPNOTSUPP : int = 95; pub static ENOTSUP : int = EOPNOTSUPP; pub static EPFNOSUPPORT : int = 96; pub static EAFNOSUPPORT : int = 97; pub static EADDRINUSE : int = 98; pub static EADDRNOTAVAIL : int = 99; pub static ENETDOWN : int = 100; pub static ENETUNREACH : int = 101; pub static ENETRESET : int = 102; pub static ECONNABORTED : int = 103; pub static ECONNRESET : int = 104; pub static ENOBUFS : int = 105; pub static EISCONN : int = 106; pub static ENOTCONN : int = 107; pub static ESHUTDOWN : int = 108; pub static ETOOMANYREFS : int = 109; pub static ETIMEDOUT : int = 110; pub static ECONNREFUSED : int = 111; pub static EHOSTDOWN : int = 112; pub static EHOSTUNREACH : int = 113; pub static EALREADY : int = 114; pub static EINPROGRESS : int = 115; pub static ESTALE : int = 116; pub static EUCLEAN : int = 117; pub static ENOTNAM : int = 118; pub static ENAVAIL : int = 119; pub static EISNAM : int = 120; pub static EREMOTEIO : int = 121; pub static EDQUOT : int = 122; pub static ENOMEDIUM : int = 123; pub static EMEDIUMTYPE : int = 124; pub static ECANCELED : int = 125; pub static ENOKEY : int = 126; pub static EKEYEXPIRED : int = 127; pub static EKEYREVOKED : int = 128; pub static EKEYREJECTED : int = 129; pub static EOWNERDEAD : int = 130; pub static ENOTRECOVERABLE : int = 131; pub static ERFKILL : int = 132; pub static EHWPOISON : int = 133; // The above errors are treated as indexes into // this array static ERR_MSG : &'static [&'static str] = &[ "Success", "Operation not permitted", "No such file or directory", "No such process", "Interrupted system call", "Input/output error", "No such device or address", "Argument list too long", "Exec format error", "Bad file descriptor", "No child processes", "Resource temporarily unavailable", "Cannot allocate memory", "Permission denied", "Bad address", "Block device required", "Device or resource busy", "File exists", "Invalid cross-device link", "No such device", "Not a directory", "Is a directory", "Invalid argument", "Too many open files in system", "Too many open files", "Inappropriate ioctl for device", "Text file busy", "File too large", "No space left on device", "Illegal seek", "Read-only file system", "Too many links", "Broken pipe", "Numerical argument out of domain", "Numerical result out of range", "Resource deadlock avoided", "File name too long", "No locks available", "Function not implemented", "Directory not empty", "Too many levels of symbolic links", "Unknown error 41", "No message of desired type", "Identifier removed", "Channel number out of range", "Level 2 not synchronized", "Level 3 halted", "Level 3 reset", "Link number out of range", "Protocol driver not attached", "No CSI structure available", "Level 2 halted", "Invalid exchange", "Invalid request descriptor",<|fim▁hole|> "Invalid slot", "Unknown error 58", "Bad font file format", "Device not a stream", "No data available", "Timer expired", "Out of streams resources", "Machine is not on the network", "Package not installed", "Object is remote", "Link has been severed", "Advertise error", "Srmount error", "Communication error on send", "Protocol error", "Multihop attempted", "RFS specific error", "Bad message", "Value too large for defined data type", "Name not unique on network", "File descriptor in bad state", "Remote address changed", "Can not access a needed shared library", "Accessing a corrupted shared library", ".lib section in a.out corrupted", "Attempting to link in too many shared libraries", "Cannot exec a shared library directly", "Invalid or incomplete multibyte or wide character", "Interrupted system call should be restarted", "Streams pipe error", "Too many users", "Socket operation on non-socket", "Destination address required", "Message too long", "Protocol wrong type for socket", "Protocol not available", "Protocol not supported", "Socket type not supported", "Operation not supported", "Protocol family not supported", "Address family not supported by protocol", "Address already in use", "Cannot assign requested address", "Network is down", "Network is unreachable", "Network dropped connection on reset", "Software caused connection abort", "Connection reset by peer", "No buffer space available", "Transport endpoint is already connected", "Transport endpoint is not connected", "Cannot send after transport endpoint shutdown", "Too many references: cannot splice", "Connection timed out", "Connection refused", "Host is down", "No route to host", "Operation already in progress", "Operation now in progress", "Stale NFS file handle", "Structure needs cleaning", "Not a XENIX named type file", "No XENIX semaphores available", "Is a named type file", "Remote I/O error", "Disk quota exceeded", "No medium found", "Wrong medium type", "Operation canceled", "Required key not available", "Key has expired", "Key has been revoked", "Key was rejected by service", "Owner died", "State not recoverable", "Operation not possible due to RF-kill", "Memory page has hardware error" ]; #[inline] pub fn msg(errno: int) -> &'static str { use std::cast; let errno = errno as uint; let len = unsafe { let (_, len) : (uint, uint) = cast::transmute(ERR_MSG); len }; unsafe { if errno < len { *ERR_MSG.unsafe_ref(errno) } else { "Unknown Error" } } }<|fim▁end|>
"Exchange full", "No anode", "Invalid request code",
<|file_name|>analysis.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Katsuya Noguchi # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,<|fim▁hole|> import http_client def get_by_species(species): path = '/info/analysis/%s' % species return http_client.get(path)<|fim▁end|>
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE.
<|file_name|>convert_gromacs2espp.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright (C) 2012,2013,2015(H),2016 # Max Planck Institute for Polymer Research # Copyright (C) 2008,2009,2010,2011 # Max-Planck-Institute for Polymer Research & Fraunhofer SCAI # # This file is part of ESPResSo++. # # ESPResSo++ is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ESPResSo++ is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import argparse import math import re def convertTable(gro_in_file, esp_out_file, sigma=1.0, epsilon=1.0, c6=1.0, c12=1.0): """Convert GROMACS tabulated file into ESPResSo++ tabulated file (new file is created). First column of input file can be either distance or angle. For non-bonded files, c6 and c12 can be provided. Default value for sigma, epsilon, c6 and c12 is 1.0. Electrostatics are not taken into account (f and fd columns). Keyword arguments: gro_in_file -- the GROMACS tabulated file name (bonded, nonbonded, angle or dihedral). esp_out_file -- filename of the ESPResSo++ tabulated file to be written. sigma -- optional, depending on whether you want to convert units or not. epsilon -- optional, depending on whether you want to convert units or not. c6 -- optional c12 -- optional """ # determine file type bonded, angle, dihedral = False, False, False re_bond = re.compile('.*_b[0-9]+.*') re_angle = re.compile('.*_a[0-9]+.*') re_dihedral = re.compile('.*_d[0-9]+.*') if re.match(re_bond, gro_in_file): bonded = True elif re.match(re_angle, gro_in_file): angle = True bonded = True elif re.match(re_dihedral, gro_in_file): dihedral = True bonded = True fin = open(gro_in_file, 'r') fout = open(esp_out_file, 'w') if bonded: # bonded has 3 columns for line in fin: if line[0] == "#": # skip comment lines continue columns = line.split() r = float(columns[0]) f = float(columns[1]) # energy fd= float(columns[2]) # force # convert units if angle or dihedral: # degrees to radians r = math.radians(r) fd=fd*180/math.pi else: r = r / sigma e = f / epsilon f = fd*sigma / epsilon if (not angle and not dihedral and r != 0) or \ (angle and r <= math.pi and r > 0) or \ (dihedral and r >= -math.pi and r <= math.pi): fout.write("%15.8g %15.8g %15.8g\n" % (r, e, f)) else: # non-bonded has 7 columns for line in fin: if line.startswith('#'): # skip comment lines continue columns = line.split() r = float(columns[0]) g = float(columns[3]) # dispersion gd= float(columns[4]) h = float(columns[5]) # repulsion hd= float(columns[6]) e = c6*g + c12*h f = c6*gd+ c12*hd # convert units r = r / sigma e = e / epsilon f = f*sigma / epsilon if r != 0: # skip 0 fout.write("%15.8g %15.8g %15.8g\n" % (r, e, f)) fin.close()<|fim▁hole|> parser = argparse.ArgumentParser() parser.add_argument('in_file') parser.add_argument('out_file') return parser def main(): args = _args().parse_args() convertTable(args.in_file, args.out_file) if __name__ == '__main__': main()<|fim▁end|>
fout.close() def _args():
<|file_name|>GridStoreAdapter.js<|end_file_name|><|fim▁begin|>/** GridStoreAdapter Stores files in Mongo using GridStore Requires the database adapter to be based on mongoclient @flow weak */ import { MongoClient, GridStore, Db} from 'mongodb'; import { FilesAdapter } from './FilesAdapter'; import defaults from '../../defaults'; export class GridStoreAdapter extends FilesAdapter { _databaseURI: string; _connectionPromise: Promise<Db>; constructor(mongoDatabaseURI = defaults.DefaultMongoURI) { super(); this._databaseURI = mongoDatabaseURI; } _connect() { if (!this._connectionPromise) { this._connectionPromise = MongoClient.connect(this._databaseURI); } return this._connectionPromise; } // For a given config object, filename, and data, store a file // Returns a promise createFile(filename: string, data) { return this._connect().then(database => { const gridStore = new GridStore(database, filename, 'w'); return gridStore.open(); }).then(gridStore => { return gridStore.write(data); }).then(gridStore => { return gridStore.close(); }); } deleteFile(filename: string) { return this._connect().then(database => { const gridStore = new GridStore(database, filename, 'r'); return gridStore.open(); }).then((gridStore) => { return gridStore.unlink(); }).then((gridStore) => { return gridStore.close(); }); } getFileData(filename: string) { return this._connect().then(database => { return GridStore.exist(database, filename) .then(() => { const gridStore = new GridStore(database, filename, 'r'); return gridStore.open(); }); }).then(gridStore => { return gridStore.read(); }); } getFileLocation(config, filename) { return (config.mount + '/files/' + config.applicationId + '/' + encodeURIComponent(filename)); } getFileStream(filename: string) { return this._connect().then(database => { return GridStore.exist(database, filename).then(() => { const gridStore = new GridStore(database, filename, 'r'); return gridStore.open(); }); }); } } <|fim▁hole|><|fim▁end|>
export default GridStoreAdapter;
<|file_name|>jsonfeedparser.ts<|end_file_name|><|fim▁begin|>import { Writable } from 'stream'; import { default as clarinet } from 'clarinet'; import { Parser, Feed } from './parser'; type JSONValue = string | boolean | null; type JSONObject = { [key: string]: JSONValue | JSONObject | JSONObject[]; } type JSONArray = JSONObject[]; type JSONAny = JSONObject | JSONArray; type JSONRoot = JSONObject & { type: string; items: JSONObject[]; } /** * Gets rid of unnecessary keys in an object * and if the only keys in an object are `$t` or `$t` and `type` * it makes the whole object the value of `$t` * otherwise it renames `$t` to `text` for consistency. * * @param {Object} * @returns {Object} */ const cleanObject = (obj: JSONObject) => {<|fim▁hole|> let keysLength = Object.keys(obj).length; if (keysLength === 1 || (keysLength === 2 && obj.type !== undefined)) { return obj.$t; } else { obj.text = obj.$t; delete obj.$t; } } return obj; }; /** * Parses a JSON feed. * * @param {boolean} buffer If true, will buffer entire object. * @return {CStream} */ export default class JSONFeedParser extends Writable implements Parser { _buffer: boolean; parser: Writable; private _currObj: JSONAny; private _currKey: string | number; constructor(buffer: boolean) { super(); this._buffer = buffer; const parser = this.parser = clarinet.createStream() as unknown as Writable; type StackItem = { obj: JSONAny; key: string | number; arr: boolean; } const stack: StackItem[] = []; this._currObj = {}; this._currKey = 'feed'; let inArray = false; let feedJustFound = false; // Look feed object in case this is a json encoded atom feed. // Place these underlying keys onto the root object. const findfeed = (key: string) => { if (key === 'feed') { feedJustFound = true; parser.removeListener('openobject', findfeed); parser.removeListener('key', findfeed); } }; const onvalue = (value: JSONValue) => { feedJustFound = false; (this._currObj as JSONObject)[this._currKey] = value; if (stack.length === 1) { parser.emit(this._currKey as string, value); if (!buffer) { delete (this._currObj as JSONObject)[this._currKey as string]; } } if (inArray) { (this._currKey as number)++; } }; const onopenobject = (key: string) => { if (feedJustFound) { feedJustFound = false; this._currKey = key; return; } let obj = (this._currObj as JSONObject)[this._currKey] = {}; stack.push({ obj: this._currObj, key: this._currKey, arr: inArray, }); this._currObj = obj; this._currKey = key; inArray = false; }; const onkey = (key: string) => { this._currKey = key; }; const oncloseobject = () => { let parent = stack.pop(); if (!parent) { return; } this._currObj = parent.obj; this._currKey = parent.key; inArray = parent.arr; // Clean object. const currObj = this._currObj as JSONObject; currObj[this._currKey] = cleanObject(currObj[this._currKey] as JSONObject); // Emit key in feed if curr is parent. if (stack.length === 1) { parser.emit(`${this._currKey}`, currObj[this._currKey]); if (!buffer) { delete currObj[this._currKey]; } // Or parent is array. } else if (inArray) { const currArr = currObj as unknown as JSONArray; if (stack.length === 2) { let key = stack[1].key; let event = key === 'entry' || key === 'items' ? 'item' : stack[1].key as string; let data = currArr[this._currKey as number]; parser.emit(event, data); if (!buffer) { currArr.splice(this._currKey as number, 1); } } if (stack.length > 2 || buffer) { (this._currKey as number)++; } } }; const onopenarray = () => { feedJustFound = false; let obj: JSONAny = (this._currObj as JSONObject)[this._currKey] = []; stack.push({ obj: this._currObj, key: this._currKey, arr: inArray, }); this._currObj = obj; this._currKey = 0; inArray = true; }; const onclosearray = () => { let parent = stack.pop() as StackItem; this._currObj = parent.obj; this._currKey = parent.key; inArray = parent.arr; if (stack.length === 1) { if (!buffer) { delete (this._currObj as JSONObject)[this._currKey]; } } else if (inArray) { (this._currKey as number)++; } }; parser.on('openobject', findfeed); parser.on('key', findfeed); parser.on('value', onvalue); parser.on('openobject', onopenobject); parser.on('key', onkey); parser.on('closeobject', oncloseobject); parser.on('openarray', onopenarray); parser.on('closearray', onclosearray); } _write(chunk: Buffer, encoding: BufferEncoding, callback: (err?: Error | null) => void) { this.parser.write(chunk, encoding); callback(null); } _final(callback: (err?: Error | null) => void) { this.parser.end(); callback(null); } done() { if (!this._buffer) { return; } let root = (this._currObj as JSONObject)[this._currKey] as JSONRoot; root.type = 'json'; if (Array.isArray(root.entry)) { root.items = root.entry; } delete root.entry; return root as Feed; } }<|fim▁end|>
if (obj.$t !== undefined) {
<|file_name|>lint-missing-doc.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // When denying at the crate level, be sure to not get random warnings from the // injected intrinsics by the compiler. #![deny(missing_docs)] #![allow(dead_code)] //! Some garbage docs for the crate here #![doc="More garbage"] type Typedef = String; pub type PubTypedef = String; //~ ERROR: missing documentation for a type alias struct Foo { a: isize, b: isize, } pub struct PubFoo { //~ ERROR: missing documentation for a struct pub a: isize, //~ ERROR: missing documentation for a struct field b: isize, } #[allow(missing_docs)] pub struct PubFoo2 { pub a: isize, pub c: isize, } mod module_no_dox {} pub mod pub_module_no_dox {} //~ ERROR: missing documentation for a module /// dox pub fn foo() {} pub fn foo2() {} //~ ERROR: missing documentation for a function fn foo3() {} #[allow(missing_docs)] pub fn foo4() {} /// dox pub trait A { /// dox fn foo(&self); /// dox fn foo_with_impl(&self) {} } #[allow(missing_docs)] trait B { fn foo(&self); fn foo_with_impl(&self) {} } pub trait C { //~ ERROR: missing documentation for a trait fn foo(&self); //~ ERROR: missing documentation for a type method fn foo_with_impl(&self) {} //~ ERROR: missing documentation for a method } #[allow(missing_docs)] pub trait D { fn dummy(&self) { } } /// dox pub trait E { type AssociatedType; //~ ERROR: missing documentation for an associated type type AssociatedTypeDef = Self; //~ ERROR: missing documentation for an associated type /// dox type DocumentedType; /// dox type DocumentedTypeDef = Self; /// dox fn dummy(&self) {} } impl Foo { pub fn foo() {} fn bar() {} } impl PubFoo { pub fn foo() {} //~ ERROR: missing documentation for a method /// dox pub fn foo1() {} fn foo2() {} #[allow(missing_docs)] pub fn foo3() {} } #[allow(missing_docs)] trait F { fn a(); fn b(&self); } // should need to redefine documentation for implementations of traits impl F for Foo { fn a() {} fn b(&self) {} } // It sure is nice if doc(hidden) implies allow(missing_docs), and that it // applies recursively #[doc(hidden)] mod a {<|fim▁hole|> pub mod b { pub fn baz() {} } } enum Baz { BazA { a: isize, b: isize }, BarB } pub enum PubBaz { //~ ERROR: missing documentation for an enum PubBazA { //~ ERROR: missing documentation for a variant a: isize, //~ ERROR: missing documentation for a struct field }, } /// dox pub enum PubBaz2 { /// dox PubBaz2A { /// dox a: isize, }, } #[allow(missing_docs)] pub enum PubBaz3 { PubBaz3A { b: isize }, } #[doc(hidden)] pub fn baz() {} mod internal_impl { /// dox pub fn documented() {} pub fn undocumented1() {} //~ ERROR: missing documentation for a function pub fn undocumented2() {} //~ ERROR: missing documentation for a function fn undocumented3() {} /// dox pub mod globbed { /// dox pub fn also_documented() {} pub fn also_undocumented1() {} //~ ERROR: missing documentation for a function fn also_undocumented2() {} } } /// dox pub mod public_interface { pub use internal_impl::documented as foo; pub use internal_impl::undocumented1 as bar; pub use internal_impl::{documented, undocumented2}; pub use internal_impl::globbed::*; } fn main() {}<|fim▁end|>
pub fn baz() {}
<|file_name|>rainbow_random_circles.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2 from pypixel import *<|fim▁hole|> h = 0 while True: x = random(WIDTH) y = random(HEIGHT) r = random(50, 100) h += 1 h %= 360 s = 100 v = 100 c = hsv2rgb((h, s, v)) circle(c, (x, y), r) update()<|fim▁end|>
show()
<|file_name|>answerer.py<|end_file_name|><|fim▁begin|>from sys import version_info from functools import reduce from operator import mul from flask_babel import gettext if version_info[0] == 3: unicode = str keywords = ('min', 'max', 'avg', 'sum', 'prod') # required answerer function # can return a list of results (any result type) for a given query def answer(query): parts = query.query.split() if len(parts) < 2: return [] try: args = list(map(float, parts[1:])) except: return [] func = parts[0] answer = None if func == b'min': answer = min(args) elif func == b'max': answer = max(args) elif func == b'avg': answer = sum(args) / len(args) elif func == b'sum': answer = sum(args) elif func == b'prod': answer = reduce(mul, args, 1) if answer is None: return [] return [{'answer': unicode(answer)}] # required answerer function<|fim▁hole|> 'description': gettext('Compute {functions} of the arguments').format(functions='/'.join(keywords)), 'examples': ['avg 123 548 2.04 24.2']}<|fim▁end|>
# returns information about the answerer def self_info(): return {'name': gettext('Statistics functions'),
<|file_name|>simple.js<|end_file_name|><|fim▁begin|>import Ember from 'ember'; import ColumnDefinition from 'ember-table/models/column-definition'; export default Ember.Controller.extend({ tableColumns: Ember.computed(function() { var dateColumn = ColumnDefinition.create({ savedWidth: 150, textAlign: 'text-align-left', headerCellName: 'Date', getCellContent: function(row) { return row.get('date').toDateString(); } }); var openColumn = ColumnDefinition.create({ savedWidth: 100, headerCellName: 'Open', getCellContent: function(row) { return row.get('open').toFixed(2); } }); var highColumn = ColumnDefinition.create({ savedWidth: 100, headerCellName: 'High', getCellContent: function(row) { return row.get('high').toFixed(2); } }); var lowColumn = ColumnDefinition.create({ savedWidth: 100,<|fim▁hole|> }); var closeColumn = ColumnDefinition.create({ savedWidth: 100, headerCellName: 'Close', getCellContent: function(row) { return row.get('close').toFixed(2); } }); return [dateColumn, openColumn, highColumn, lowColumn, closeColumn]; }), tableContent: Ember.computed(function() { return _.range(100).map(function(index) { var date = new Date(); date.setDate(date.getDate() + index); return { date: date, open: Math.random() * 100 - 50, high: Math.random() * 100 - 50, low: Math.random() * 100 - 50, close: Math.random() * 100 - 50, volume: Math.random() * 1000000 }; }); }) });<|fim▁end|>
headerCellName: 'Low', getCellContent: function(row) { return row.get('low').toFixed(2); }
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). mod hash; pub use hash::Fingerprint; mod snapshot; pub use snapshot::{GetFileDigest, Snapshot}; mod store; pub use store::{Digest, Store}; mod pool; pub use pool::ResettablePool; extern crate bazel_protos; extern crate boxfuture; extern crate digest; extern crate futures; extern crate futures_cpupool; extern crate glob; extern crate grpcio; extern crate hex; extern crate ignore; extern crate itertools; #[macro_use] extern crate lazy_static; extern crate lmdb; extern crate ordermap; extern crate protobuf; extern crate sha2; extern crate tar; extern crate tempdir; use std::collections::HashSet; use std::os::unix::fs::PermissionsExt; use std::path::{Component, Path, PathBuf}; use std::sync::{Arc, Mutex}; use std::{fmt, fs}; use std::io::{self, Read}; use std::cmp::min; use futures::future::{self, Future}; use futures_cpupool::CpuFuture; use glob::Pattern; use ignore::gitignore::{Gitignore, GitignoreBuilder}; use ordermap::OrderMap; use tempdir::TempDir; use boxfuture::{Boxable, BoxFuture}; use hash::WriterHasher; #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub enum Stat { Link(Link), Dir(Dir), File(File), } impl Stat { pub fn path(&self) -> &Path { match self { &Stat::Dir(Dir(ref p)) => p.as_path(), &Stat::File(File { path: ref p, .. }) => p.as_path(), &Stat::Link(Link(ref p)) => p.as_path(), } } } #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct Link(pub PathBuf); #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct Dir(pub PathBuf); #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct File { pub path: PathBuf, pub is_executable: bool, } #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub enum PathStat { Dir { // The symbolic name of some filesystem Path, which is context specific. path: PathBuf, // The canonical Stat that underlies the Path. stat: Dir, }, File { // The symbolic name of some filesystem Path, which is context specific. path: PathBuf, // The canonical Stat that underlies the Path. stat: File, }, } impl PathStat { fn dir(path: PathBuf, stat: Dir) -> PathStat { PathStat::Dir { path: path, stat: stat, } } fn file(path: PathBuf, stat: File) -> PathStat { PathStat::File { path: path, stat: stat, } } pub fn path(&self) -> &Path { match self { &PathStat::Dir { ref path, .. } => path.as_path(), &PathStat::File { ref path, .. } => path.as_path(), } } } lazy_static! { static ref PARENT_DIR: &'static str = ".."; static ref SINGLE_STAR_GLOB: Pattern = Pattern::new("*").unwrap(); static ref DOUBLE_STAR: &'static str = "**"; static ref DOUBLE_STAR_GLOB: Pattern = Pattern::new("**").unwrap(); static ref EMPTY_IGNORE: Arc<Gitignore> = Arc::new(Gitignore::empty()); } #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub enum PathGlob { Wildcard { canonical_dir: Dir, symbolic_path: PathBuf, wildcard: Pattern, }, DirWildcard { canonical_dir: Dir, symbolic_path: PathBuf, wildcard: Pattern, remainder: Vec<Pattern>, }, } impl PathGlob { fn wildcard(canonical_dir: Dir, symbolic_path: PathBuf, wildcard: Pattern) -> PathGlob { PathGlob::Wildcard { canonical_dir: canonical_dir, symbolic_path: symbolic_path, wildcard: wildcard, } } fn dir_wildcard( canonical_dir: Dir, symbolic_path: PathBuf, wildcard: Pattern, remainder: Vec<Pattern>, ) -> PathGlob { PathGlob::DirWildcard { canonical_dir: canonical_dir, symbolic_path: symbolic_path, wildcard: wildcard, remainder: remainder, } } pub fn create(filespecs: &[String]) -> Result<Vec<PathGlob>, String> { let mut path_globs = Vec::new(); for filespec in filespecs { let canonical_dir = Dir(PathBuf::new()); let symbolic_path = PathBuf::new(); path_globs.extend(PathGlob::parse(canonical_dir, symbolic_path, filespec)?); } Ok(path_globs) } /// /// Given a filespec String relative to a canonical Dir and path, split it into path components /// while eliminating consecutive '**'s (to avoid repetitive traversing), and parse it to a /// series of PathGlob objects. /// fn parse( canonical_dir: Dir, symbolic_path: PathBuf, filespec: &str, ) -> Result<Vec<PathGlob>, String> { let mut parts = Vec::new(); let mut prev_was_doublestar = false; for component in Path::new(filespec).components() { let part = match component { Component::Prefix(..) | Component::RootDir => return Err(format!("Absolute paths not supported: {:?}", filespec)), Component::CurDir => continue, c => c.as_os_str(), }; // Ignore repeated doublestar instances. let cur_is_doublestar = *DOUBLE_STAR == part; if prev_was_doublestar && cur_is_doublestar { continue; } prev_was_doublestar = cur_is_doublestar; // NB: Because the filespec is a String input, calls to `to_str_lossy` are not lossy; the // use of `Path` is strictly for os-independent Path parsing. parts.push(Pattern::new(&part.to_string_lossy()).map_err(|e| { format!("Could not parse {:?} as a glob: {:?}", filespec, e) })?); } PathGlob::parse_globs(canonical_dir, symbolic_path, &parts) } /// /// Given a filespec as Patterns, create a series of PathGlob objects. /// fn parse_globs( canonical_dir: Dir, symbolic_path: PathBuf, parts: &[Pattern], ) -> Result<Vec<PathGlob>, String> { if parts.is_empty() { Ok(vec![]) } else if *DOUBLE_STAR == parts[0].as_str() { if parts.len() == 1 { // Per https://git-scm.com/docs/gitignore: // "A trailing '/**' matches everything inside. For example, 'abc/**' matches all files // inside directory "abc", relative to the location of the .gitignore file, with infinite // depth." return Ok(vec![ PathGlob::dir_wildcard( canonical_dir.clone(), symbolic_path.clone(), SINGLE_STAR_GLOB.clone(), vec![DOUBLE_STAR_GLOB.clone()] ), PathGlob::wildcard( canonical_dir, symbolic_path, SINGLE_STAR_GLOB.clone() ), ]); } // There is a double-wildcard in a dirname of the path: double wildcards are recursive, // so there are two remainder possibilities: one with the double wildcard included, and the // other without. let pathglob_with_doublestar = PathGlob::dir_wildcard( canonical_dir.clone(), symbolic_path.clone(), SINGLE_STAR_GLOB.clone(), parts[0..].to_vec(), ); let pathglob_no_doublestar = if parts.len() == 2 { PathGlob::wildcard(canonical_dir, symbolic_path, parts[1].clone()) } else { PathGlob::dir_wildcard( canonical_dir, symbolic_path, parts[1].clone(), parts[2..].to_vec(), ) }; Ok(vec![pathglob_with_doublestar, pathglob_no_doublestar]) } else if *PARENT_DIR == parts[0].as_str() { // A request for the parent of `canonical_dir`: since we've already expanded the directory // to make it canonical, we can safely drop it directly and recurse without this component. // The resulting symbolic path will continue to contain a literal `..`. let mut canonical_dir_parent = canonical_dir; let mut symbolic_path_parent = symbolic_path; if !canonical_dir_parent.0.pop() { return Err(format!( "Globs may not traverse outside the root: {:?}", parts )); } symbolic_path_parent.push(Path::new(*PARENT_DIR)); PathGlob::parse_globs(canonical_dir_parent, symbolic_path_parent, &parts[1..]) } else if parts.len() == 1 { // This is the path basename. Ok(vec![ PathGlob::wildcard( canonical_dir, symbolic_path, parts[0].clone() ), ]) } else { // This is a path dirname. Ok(vec![ PathGlob::dir_wildcard( canonical_dir, symbolic_path, parts[0].clone(), parts[1..].to_vec() ), ]) } } } #[derive(Debug)] pub struct PathGlobs { include: Vec<PathGlob>, exclude: Arc<Gitignore>, } impl PathGlobs { pub fn create(include: &[String], exclude: &[String]) -> Result<PathGlobs, String> { let ignore_for_exclude = if exclude.is_empty() { EMPTY_IGNORE.clone() } else { Arc::new(create_ignore(exclude).map_err(|e| { format!("Could not parse glob excludes {:?}: {:?}", exclude, e) })?) }; Ok(PathGlobs { include: PathGlob::create(include)?, exclude: ignore_for_exclude, }) } pub fn from_globs(include: Vec<PathGlob>) -> PathGlobs { PathGlobs { include: include, exclude: EMPTY_IGNORE.clone(), } } } #[derive(Debug)] struct PathGlobsExpansion<T: Sized> { context: T, // Globs that have yet to be expanded, in order. todo: Vec<PathGlob>, // Paths to exclude. exclude: Arc<Gitignore>, // Globs that have already been expanded. completed: HashSet<PathGlob>, // Unique Paths that have been matched, in order. outputs: OrderMap<PathStat, ()>, } fn create_ignore(patterns: &[String]) -> Result<Gitignore, ignore::Error> { let mut ignore_builder = GitignoreBuilder::new(""); for pattern in patterns { ignore_builder.add_line(None, pattern.as_str())?; } ignore_builder.build() } fn is_ignored(ignore: &Gitignore, stat: &Stat) -> bool { let is_dir = match stat { &Stat::Dir(_) => true, _ => false, }; match ignore.matched(stat.path(), is_dir) { ignore::Match::None | ignore::Match::Whitelist(_) => false, ignore::Match::Ignore(_) => true, } } /// /// All Stats consumed or return by this type are relative to the root. /// pub struct PosixFS { root: Dir, pool: Arc<ResettablePool>, ignore: Gitignore, } impl PosixFS { pub fn new<P: AsRef<Path>>( root: P, pool: Arc<ResettablePool>, ignore_patterns: Vec<String>, ) -> Result<PosixFS, String> { let root: &Path = root.as_ref(); let canonical_root = root .canonicalize() .and_then(|canonical| { canonical.metadata().and_then( |metadata| if metadata.is_dir() { Ok(Dir(canonical)) } else { Err(io::Error::new( io::ErrorKind::InvalidInput, "Not a directory.", )) }, ) }) .map_err(|e| { format!("Could not canonicalize root {:?}: {:?}", root, e) })?; let ignore = create_ignore(&ignore_patterns).map_err(|e| { format!( "Could not parse build ignore inputs {:?}: {:?}", ignore_patterns, e ) })?; Ok(PosixFS { root: canonical_root, pool: pool, ignore: ignore, }) } fn scandir_sync(root: PathBuf, dir_relative_to_root: Dir) -> Result<Vec<Stat>, io::Error> { let dir_abs = root.join(&dir_relative_to_root.0); let mut stats: Vec<Stat> = dir_abs .read_dir()? .map(|readdir| { let dir_entry = readdir?; let get_metadata = || std::fs::metadata(dir_abs.join(dir_entry.file_name())); PosixFS::stat_internal( dir_relative_to_root.0.join(dir_entry.file_name()), dir_entry.file_type()?, &dir_abs, get_metadata, ) }) .collect::<Result<Vec<_>, io::Error>>()?; stats.sort_by(|s1, s2| s1.path().cmp(s2.path())); Ok(stats) } pub fn is_ignored(&self, stat: &Stat) -> bool { is_ignored(&self.ignore, stat) } pub fn read_file(&self, file: &File) -> BoxFuture<FileContent, io::Error> { let path = file.path.clone(); let path_abs = self.root.0.join(&file.path); self .pool .spawn_fn(move || { std::fs::File::open(&path_abs).and_then(|mut f| { let mut content = Vec::new(); f.read_to_end(&mut content)?; Ok(FileContent { path, content }) }) }) .to_boxed() } pub fn read_link(&self, link: &Link) -> BoxFuture<PathBuf, io::Error> { let link_parent = link.0.parent().map(|p| p.to_owned()); let link_abs = self.root.0.join(link.0.as_path()).to_owned(); self .pool .spawn_fn(move || { link_abs.read_link().and_then( |path_buf| if path_buf.is_absolute() { Err(io::Error::new( io::ErrorKind::InvalidData, format!("Absolute symlink: {:?}", link_abs), )) } else { link_parent.map(|parent| parent.join(path_buf)).ok_or_else( || { io::Error::new( io::ErrorKind::InvalidData, format!("Symlink without a parent?: {:?}", link_abs), ) }, ) }, ) }) .to_boxed() } /// /// Makes a Stat for path_for_stat relative to absolute_path_to_root. /// fn stat_internal<F>( path_for_stat: PathBuf, file_type: std::fs::FileType, absolute_path_to_root: &Path, get_metadata: F, ) -> Result<Stat, io::Error> where F: FnOnce() -> Result<fs::Metadata, io::Error>, { if !path_for_stat.is_relative() { return Err(io::Error::new( io::ErrorKind::InvalidInput, format!( "Argument path_for_stat to PosixFS::stat must be relative path, got {:?}", path_for_stat ), )); } // TODO: Make this an instance method, and stop having to check this every call. if !absolute_path_to_root.is_absolute() { return Err(io::Error::new( io::ErrorKind::InvalidInput, format!( "Argument absolute_path_to_root to PosixFS::stat must be absolute path, got {:?}", absolute_path_to_root ), )); } if file_type.is_dir() { Ok(Stat::Dir(Dir(path_for_stat))) } else if file_type.is_file() { let is_executable = get_metadata()?.permissions().mode() & 0o100 == 0o100; Ok(Stat::File(File { path: path_for_stat, is_executable: is_executable, })) } else if file_type.is_symlink() { Ok(Stat::Link(Link(path_for_stat))) } else { Err(io::Error::new( io::ErrorKind::InvalidData, format!( "Expected File, Dir or Link, but {:?} (relative to {:?}) was a {:?}", path_for_stat, absolute_path_to_root, file_type ), )) } } pub fn stat(&self, relative_path: PathBuf) -> Result<Stat, io::Error> { let metadata = fs::symlink_metadata(self.root.0.join(&relative_path))?; PosixFS::stat_internal( relative_path, metadata.file_type(), &self.root.0, || Ok(metadata), ) } pub fn scandir(&self, dir: &Dir) -> BoxFuture<Vec<Stat>, io::Error> { let dir = dir.to_owned(); let root = self.root.0.clone(); self .pool .spawn_fn(move || PosixFS::scandir_sync(root, dir)) .to_boxed() } } impl VFS<io::Error> for Arc<PosixFS> { fn read_link(&self, link: Link) -> BoxFuture<PathBuf, io::Error> { PosixFS::read_link(self, &link) } fn scandir(&self, dir: Dir) -> BoxFuture<Vec<Stat>, io::Error> { PosixFS::scandir(self, &dir) } fn is_ignored(&self, stat: &Stat) -> bool { PosixFS::is_ignored(self, stat) } fn mk_error(msg: &str) -> io::Error { io::Error::new(io::ErrorKind::Other, msg) } } /// /// A context for filesystem operations parameterized on an error type 'E'. /// pub trait VFS<E: Send + Sync + 'static>: Clone + Send + Sync + 'static { fn read_link(&self, link: Link) -> BoxFuture<PathBuf, E>; fn scandir(&self, dir: Dir) -> BoxFuture<Vec<Stat>, E>; fn is_ignored(&self, stat: &Stat) -> bool; fn mk_error(msg: &str) -> E; /// /// Canonicalize the Link for the given Path to an underlying File or Dir. May result /// in None if the PathStat represents a broken Link. /// /// Skips ignored paths both before and after expansion. /// /// TODO: Should handle symlink loops (which would exhibit as an infinite loop in expand). /// fn canonicalize(&self, symbolic_path: PathBuf, link: Link) -> BoxFuture<Option<PathStat>, E> { // Read the link, which may result in PathGlob(s) that match 0 or 1 Path. let context = self.clone(); self .read_link(link) .map(|dest_path| { // If the link destination can't be parsed as PathGlob(s), it is broken. dest_path .to_str() .and_then(|dest_str| { // Escape any globs in the parsed dest, which should guarantee one output PathGlob. PathGlob::create(&[Pattern::escape(dest_str)]).ok() }) .unwrap_or_else(|| vec![]) }) .and_then(move |link_globs| { context.expand(PathGlobs::from_globs(link_globs)) }) .map(|mut path_stats| { // Since we've escaped any globs in the parsed path, expect either 0 or 1 destination. path_stats.pop().map(|ps| match ps { PathStat::Dir { stat, .. } => PathStat::dir(symbolic_path, stat), PathStat::File { stat, .. } => PathStat::file(symbolic_path, stat), }) }) .to_boxed() } fn directory_listing( &self, canonical_dir: Dir, symbolic_path: PathBuf, wildcard: Pattern, exclude: &Arc<Gitignore>, ) -> BoxFuture<Vec<PathStat>, E> { // List the directory. let context = self.clone(); let exclude = exclude.clone(); self .scandir(canonical_dir) .and_then(move |dir_listing| { // Match any relevant Stats, and join them into PathStats. future::join_all( dir_listing .into_iter() .filter(|stat| { // Match relevant filenames. stat .path() .file_name() .map(|file_name| wildcard.matches_path(Path::new(file_name))) .unwrap_or(false) }) .filter_map(|stat| { // Append matched filenames. stat .path() .file_name() .map(|file_name| symbolic_path.join(file_name)) .map(|symbolic_stat_path| (symbolic_stat_path, stat)) }) .map(|(stat_symbolic_path, stat)| { // Canonicalize matched PathStats, and filter paths that are ignored by either the // context, or by local excludes. Note that we apply context ignore patterns to both // the symbolic and canonical names of Links, but only apply local excludes to their // symbolic names. if context.is_ignored(&stat) || is_ignored(&exclude, &stat) { future::ok(None).to_boxed() } else { match stat { Stat::Link(l) => context.canonicalize(stat_symbolic_path, l), Stat::Dir(d) => { future::ok(Some(PathStat::dir(stat_symbolic_path.to_owned(), d))).to_boxed() } Stat::File(f) => { future::ok(Some(PathStat::file(stat_symbolic_path.to_owned(), f))).to_boxed() } } } }) .collect::<Vec<_>>(), ) }) .map(|path_stats| { // See the TODO above. path_stats.into_iter().filter_map(|pso| pso).collect() }) .to_boxed() } /// /// Recursively expands PathGlobs into PathStats while applying excludes. /// fn expand(&self, path_globs: PathGlobs) -> BoxFuture<Vec<PathStat>, E> { if path_globs.include.is_empty() { return future::ok(vec![]).to_boxed(); } let init = PathGlobsExpansion { context: self.clone(), todo: path_globs.include, exclude: path_globs.exclude, completed: HashSet::default(), outputs: OrderMap::default(), }; future::loop_fn(init, |mut expansion| { // Request the expansion of all outstanding PathGlobs as a batch. let round = future::join_all({ let exclude = &expansion.exclude; let context = &expansion.context; expansion .todo .drain(..) .map(|path_glob| context.expand_single(path_glob, exclude)) .collect::<Vec<_>>() }); round.map(move |paths_and_globs| { // Collect distinct new PathStats and PathGlobs for (paths, globs) in paths_and_globs.into_iter() { expansion.outputs.extend(paths.into_iter().map(|p| (p, ()))); let completed = &mut expansion.completed; expansion.todo.extend(globs.into_iter().filter(|pg| { completed.insert(pg.clone()) })); } // If there were any new PathGlobs, continue the expansion. if expansion.todo.is_empty() { future::Loop::Break(expansion) } else { future::Loop::Continue(expansion) } }) }).map(|expansion| { assert!( expansion.todo.is_empty(), "Loop shouldn't have exited with work to do: {:?}", expansion.todo, ); // Finally, capture the resulting PathStats from the expansion. expansion.outputs.into_iter().map(|(k, _)| k).collect() }) .to_boxed() } /// /// Apply a PathGlob, returning PathStats and additional PathGlobs that are needed for the /// expansion. /// fn expand_single( &self, path_glob: PathGlob, exclude: &Arc<Gitignore>, ) -> BoxFuture<(Vec<PathStat>, Vec<PathGlob>), E> { match path_glob { PathGlob::Wildcard { canonical_dir, symbolic_path, wildcard } => // Filter directory listing to return PathStats, with no continuation. self.directory_listing(canonical_dir, symbolic_path, wildcard, exclude) .map(|path_stats| (path_stats, vec![])) .to_boxed(), PathGlob::DirWildcard { canonical_dir, symbolic_path, wildcard, remainder } => // Filter directory listing and request additional PathGlobs for matched Dirs. self.directory_listing(canonical_dir, symbolic_path, wildcard, exclude) .and_then(move |path_stats| { path_stats.into_iter() .filter_map(|ps| match ps { PathStat::Dir { path, stat } => Some( PathGlob::parse_globs(stat, path, &remainder) .map_err(|e| Self::mk_error(e.as_str())) ), PathStat::File { .. } => None, }) .collect::<Result<Vec<_>, E>>() }) .map(|path_globs| { let flattened = path_globs.into_iter() .flat_map(|path_globs| path_globs.into_iter()) .collect(); (vec![], flattened) }) .to_boxed(), } } } pub struct FileContent { pub path: PathBuf, pub content: Vec<u8>, } impl fmt::Debug for FileContent { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let len = min(self.content.len(), 5); let describer = if len < self.content.len() { "starting " } else { "" }; write!( f, "FileContent(path={:?}, content={} bytes {}{:?})", self.path, self.content.len(), describer, &self.content[..len] ) } } // Like std::fs::create_dir_all, except handles concurrent calls among multiple // threads or processes. Originally lifted from rustc. pub fn safe_create_dir_all_ioerror(path: &Path) -> Result<(), io::Error> { match fs::create_dir(path) { Ok(()) => return Ok(()), Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return Ok(()), Err(ref e) if e.kind() == io::ErrorKind::NotFound => {} Err(e) => return Err(e), } match path.parent() { Some(p) => try!(safe_create_dir_all_ioerror(p)), None => return Ok(()), } match fs::create_dir(path) { Ok(()) => Ok(()), Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()), Err(e) => Err(e), } } fn safe_create_dir_all(path: &Path) -> Result<(), String> { safe_create_dir_all_ioerror(path).map_err(|e| { format!("Failed to create dir {:?} due to {:?}", path, e) }) } fn safe_create_tmpdir_in(base_dir: &Path, prefix: &str) -> Result<TempDir, String> { safe_create_dir_all(&base_dir)?; Ok(TempDir::new_in(&base_dir, prefix).map_err(|e| { format!("Failed to create tempdir {:?} due to {:?}", base_dir, e) })?) } /// /// A facade for the snapshot directory, which lives under the pants workdir. /// pub struct Snapshots { snapshots_dir: PathBuf, snapshots_generator: Mutex<(TempDir, usize)>, } impl Snapshots { pub fn new(snapshots_dir: PathBuf) -> Result<Snapshots, String> { let snapshots_tmpdir = safe_create_tmpdir_in(&snapshots_dir, ".tmp")?; Ok(Snapshots { snapshots_dir: snapshots_dir, snapshots_generator: Mutex::new((snapshots_tmpdir, 0)), }) } pub fn snapshot_path(&self) -> &Path { self.snapshots_dir.as_path() } fn next_temp_path(&self) -> Result<PathBuf, String> { let mut gen = self.snapshots_generator.lock().unwrap(); gen.1 += 1; // N.B. Sometimes, in e.g. a `./pants clean-all test ...` the snapshot tempdir created at the // beginning of a run can be removed out from under us by e.g. the `clean-all` task. Here, we // we double check existence of the `TempDir`'s path when the path is accessed and replace if // necessary. if !gen.0.path().exists() { gen.0 = safe_create_tmpdir_in(&self.snapshots_dir, ".tmp")?; } Ok(gen.0.path().join(format!("{}.tmp", gen.1))) } /// /// A non-canonical (does not expand symlinks) in-memory form of normalize. Used to collapse /// parent and cur components, which are legal in symbolic paths in PathStats, but not in /// Tar files. /// fn normalize(path: &Path) -> Result<PathBuf, String> { let mut res = PathBuf::new(); for component in path.components() { match component { Component::Prefix(..) | Component::RootDir => return Err(format!("Absolute paths not supported: {:?}", path)), Component::CurDir => continue, Component::ParentDir => { // Pop the previous component. if !res.pop() { return Err(format!( "Globs may not traverse outside the root: {:?}", path )); } else { continue; } } Component::Normal(p) => res.push(p), } } Ok(res) } /// /// Create a tar file on the given Write instance containing the given paths, or /// return an error string. /// fn tar_create<W: io::Write>( dest: W, paths: &Vec<PathStat>, relative_to: &Dir, ) -> Result<W, String> { let mut tar_builder = tar::Builder::new(dest); tar_builder.mode(tar::HeaderMode::Deterministic); for path_stat in paths { // Append the PathStat using the symbolic name and underlying stat. let append_res = match path_stat { &PathStat::File { ref path, ref stat } => { let normalized = Snapshots::normalize(path)?; let mut input = fs::File::open(relative_to.0.join(stat.path.as_path())) .map_err(|e| format!("Failed to open {:?}: {:?}", path_stat, e))?; tar_builder.append_file(normalized, &mut input) } &PathStat::Dir { ref path, ref stat } => { let normalized = Snapshots::normalize(path)?; tar_builder.append_dir(normalized, relative_to.0.join(stat.0.as_path())) } }; append_res.map_err(|e| { format!("Failed to tar {:?}: {:?}", path_stat, e) })?; } // Finish the tar file, returning ownership of the stream to the caller. Ok(tar_builder.into_inner().map_err(|e| { format!("Failed to finalize snapshot tar: {:?}", e) })?) } /// /// Create a tar file at the given dest Path containing the given paths, while /// fingerprinting the written stream. /// fn tar_create_fingerprinted( dest: &Path, paths: &Vec<PathStat>, relative_to: &Dir, ) -> Result<Fingerprint, String> { // Wrap buffering around a fingerprinted stream above a File. let stream = io::BufWriter::new(WriterHasher::new(fs::File::create(dest).map_err(|e| { format!("Failed to create destination file: {:?}", e) })?)); // Then append the tar to the stream, and retrieve the Fingerprint to flush all writers. Ok( Snapshots::tar_create(stream, paths, relative_to)? .into_inner() .map_err(|e| { format!("Failed to flush to {:?}: {:?}", dest, e.error()) })? .finish(), ) } /// /// Attempts to rename src to dst, and _succeeds_ if dst already exists. This is safe in /// the case of Snapshots because the destination path is unique to its content. /// fn finalize(temp_path: &Path, dest_path: &Path) -> Result<(), String> { if dest_path.is_file() { // The Snapshot has already been created. fs::remove_file(temp_path).unwrap_or(()); Ok(()) } else { let dest_dir = dest_path.parent().expect( "All snapshot paths must have parent directories.", ); safe_create_dir_all(dest_dir)?; match fs::rename(temp_path, dest_path) { Ok(_) => Ok(()), Err(_) if dest_path.is_file() => Ok(()), Err(e) => Err(format!( "Failed to finalize snapshot at {:?}: {:?}", dest_path, e )), } } } fn path_for(&self, fingerprint: &Fingerprint) -> PathBuf { Snapshots::path_under_for(self.snapshot_path(), fingerprint) } fn path_under_for(path: &Path, fingerprint: &Fingerprint) -> PathBuf { let hex = fingerprint.to_hex(); path.join(&hex[0..2]).join(&hex[2..4]).join( format!("{}.tar", hex), ) } /// /// Creates a Snapshot for the given paths under the given VFS. /// pub fn create(&self, fs: &PosixFS, paths: Vec<PathStat>) -> CpuFuture<Snapshot, String> { let dest_dir = self.snapshot_path().to_owned(); let root = fs.root.clone(); let temp_path = self.next_temp_path().expect( "Couldn't get the next temp path.", ); fs.pool.spawn_fn(move || { // Write the tar deterministically to a temporary file while fingerprinting. let fingerprint = Snapshots::tar_create_fingerprinted(temp_path.as_path(), &paths, &root)?; // Rename to the final path if it does not already exist. Snapshots::finalize( temp_path.as_path(), Snapshots::path_under_for(&dest_dir, &fingerprint).as_path(), )?; Ok(Snapshot { fingerprint: fingerprint, digest: None, path_stats: paths, }) }) } fn contents_for_sync(snapshot: Snapshot, path: PathBuf) -> Result<Vec<FileContent>, io::Error> { let mut archive = fs::File::open(path).map(|f| tar::Archive::new(f))?; // Zip the in-memory Snapshot to the on disk representation, validating as we go. let mut files_content = Vec::new(); for (entry_res, path_stat) in archive.entries()?.zip(snapshot.path_stats.into_iter()) { let mut entry = entry_res?; if entry.header().entry_type() == tar::EntryType::file() { let path = match path_stat { PathStat::File { path, .. } => path, PathStat::Dir { .. } => panic!("Snapshot contents changed after storage."), }; let mut content = Vec::new(); io::Read::read_to_end(&mut entry, &mut content)?; files_content.push(FileContent { path: path, content: content, }); } } Ok(files_content) } pub fn contents_for( &self, fs: &PosixFS, snapshot: Snapshot, ) -> CpuFuture<Vec<FileContent>, String> { let archive_path = self.path_for(&snapshot.fingerprint); fs.pool.spawn_fn(move || { let snapshot_str = format!("{:?}", snapshot); Snapshots::contents_for_sync(snapshot, archive_path).map_err(|e| { format!("Failed to open Snapshot {}: {:?}", snapshot_str, e) }) }) } } #[cfg(test)] mod posixfs_test { extern crate tempdir; extern crate testutil; use super::{Dir, File, Link, PosixFS, Stat, ResettablePool}; use futures::Future; use self::testutil::make_file; use std; use std::path::{Path, PathBuf}; use std::sync::Arc; #[test] fn is_executable_false() { let dir = tempdir::TempDir::new("posixfs").unwrap(); make_file(&dir.path().join("marmosets"), &[], 0o611); assert_only_file_is_executable(dir.path(), false); } #[test] fn is_executable_true() { let dir = tempdir::TempDir::new("posixfs").unwrap(); make_file(&dir.path().join("photograph_marmosets"), &[], 0o700); assert_only_file_is_executable(dir.path(), true); } #[test] fn read_file() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let path = PathBuf::from("marmosets"); let content = "cute".as_bytes().to_vec(); make_file( &std::fs::canonicalize(dir.path()).unwrap().join(&path), &content, 0o600, ); let fs = new_posixfs(&dir.path()); let file_content = fs.read_file(&File { path: path.clone(), is_executable: false, }).wait() .unwrap(); assert_eq!(file_content.path, path); assert_eq!(file_content.content, content); } #[test] fn read_file_missing() { let dir = tempdir::TempDir::new("posixfs").unwrap(); new_posixfs(&dir.path()) .read_file(&File { path: PathBuf::from("marmosets"), is_executable: false, }) .wait() .expect_err("Expected error"); } #[test] fn stat_executable_file() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("photograph_marmosets"); make_file(&dir.path().join(&path), &[], 0o700); assert_eq!( posix_fs.stat(path.clone()).unwrap(), super::Stat::File(File { path: path, is_executable: true, }) ) } #[test] fn stat_nonexecutable_file() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("marmosets"); make_file(&dir.path().join(&path), &[], 0o600); assert_eq!( posix_fs.stat(path.clone()).unwrap(), super::Stat::File(File { path: path, is_executable: false, }) ) } #[test] fn stat_dir() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("enclosure"); std::fs::create_dir(dir.path().join(&path)).unwrap(); assert_eq!( posix_fs.stat(path.clone()).unwrap(), super::Stat::Dir(Dir(path)) ) } #[test] fn stat_symlink() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let posix_fs = new_posixfs(&dir.path());<|fim▁hole|> let path = PathBuf::from("marmosets"); make_file(&dir.path().join(&path), &[], 0o600); let link_path = PathBuf::from("remarkably_similar_marmoset"); std::os::unix::fs::symlink(&dir.path().join(path), dir.path().join(&link_path)).unwrap(); assert_eq!( posix_fs.stat(link_path.clone()).unwrap(), super::Stat::Link(Link(link_path)) ) } #[test] fn stat_other() { new_posixfs("/dev").stat(PathBuf::from("null")).expect_err( "Want error", ); } #[test] fn stat_missing() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let posix_fs = new_posixfs(&dir.path()); posix_fs.stat(PathBuf::from("no_marmosets")).expect_err( "Want error", ); } #[test] fn scandir_empty() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("empty_enclosure"); std::fs::create_dir(dir.path().join(&path)).unwrap(); assert_eq!(posix_fs.scandir(&Dir(path)).wait().unwrap(), vec![]); } #[test] fn scandir() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("enclosure"); std::fs::create_dir(dir.path().join(&path)).unwrap(); let a_marmoset = path.join("a_marmoset"); let feed = path.join("feed"); let hammock = path.join("hammock"); let remarkably_similar_marmoset = path.join("remarkably_similar_marmoset"); let sneaky_marmoset = path.join("sneaky_marmoset"); make_file(&dir.path().join(&feed), &[], 0o700); make_file(&dir.path().join(&a_marmoset), &[], 0o600); make_file(&dir.path().join(&sneaky_marmoset), &[], 0o600); std::os::unix::fs::symlink( &dir.path().join(&a_marmoset), dir.path().join( &dir.path().join(&remarkably_similar_marmoset), ), ).unwrap(); std::fs::create_dir(dir.path().join(&hammock)).unwrap(); make_file( &dir.path().join(&hammock).join("napping_marmoset"), &[], 0o600, ); assert_eq!( posix_fs.scandir(&Dir(path)).wait().unwrap(), vec![ Stat::File(File { path: a_marmoset, is_executable: false, }), Stat::File(File { path: feed, is_executable: true, }), Stat::Dir(Dir(hammock)), Stat::Link(Link(remarkably_similar_marmoset)), Stat::File(File { path: sneaky_marmoset, is_executable: false, }), ] ); } #[test] fn scandir_missing() { let dir = tempdir::TempDir::new("posixfs").unwrap(); let posix_fs = new_posixfs(&dir.path()); posix_fs .scandir(&Dir(PathBuf::from("no_marmosets_here"))) .wait() .expect_err("Want error"); } fn assert_only_file_is_executable(path: &Path, want_is_executable: bool) { let fs = new_posixfs(path); let stats = fs.scandir(&Dir(PathBuf::from("."))).wait().unwrap(); assert_eq!(stats.len(), 1); match stats.get(0).unwrap() { &super::Stat::File(File { is_executable: got, .. }) => assert_eq!(want_is_executable, got), other => panic!("Expected file, got {:?}", other), } } fn new_posixfs<P: AsRef<Path>>(dir: P) -> PosixFS { PosixFS::new( dir.as_ref(), Arc::new(ResettablePool::new("test-pool-".to_string())), vec![], ).unwrap() } }<|fim▁end|>
<|file_name|>CreateConstructorRequest.java<|end_file_name|><|fim▁begin|>// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.lang.jvm.actions; public interface CreateConstructorRequest extends CreateExecutableRequest {<|fim▁hole|><|fim▁end|>
}
<|file_name|>event.rs<|end_file_name|><|fim▁begin|>/* TOOD: Implement for other kqueue based systems */ use {Errno, Result}; #[cfg(not(target_os = "netbsd"))] use libc::{timespec, time_t, c_int, c_long, intptr_t, uintptr_t}; #[cfg(target_os = "netbsd")] use libc::{timespec, time_t, c_long, intptr_t, uintptr_t, size_t}; use libc; use std::os::unix::io::RawFd; use std::ptr; use std::mem; // Redefine kevent in terms of programmer-friendly enums and bitfields. #[derive(Clone, Copy)] #[repr(C)] pub struct KEvent { kevent: libc::kevent, } #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd"))] type type_of_udata = *mut libc::c_void; #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] type type_of_data = libc::intptr_t; #[cfg(any(target_os = "netbsd"))] type type_of_udata = intptr_t; #[cfg(any(target_os = "netbsd", target_os = "openbsd"))] type type_of_data = libc::int64_t; #[cfg(target_os = "netbsd")] type type_of_event_filter = u32; #[cfg(not(target_os = "netbsd"))] type type_of_event_filter = i16; libc_enum! { #[cfg_attr(target_os = "netbsd", repr(u32))] #[cfg_attr(not(target_os = "netbsd"), repr(i16))] pub enum EventFilter { EVFILT_AIO, #[cfg(target_os = "dragonfly")] EVFILT_EXCEPT, #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] EVFILT_FS, #[cfg(target_os = "freebsd")] EVFILT_LIO, #[cfg(any(target_os = "ios", target_os = "macos"))] EVFILT_MACHPORT, EVFILT_PROC, EVFILT_READ, EVFILT_SIGNAL, EVFILT_TIMER, #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] EVFILT_USER, #[cfg(any(target_os = "ios", target_os = "macos"))] EVFILT_VM, EVFILT_VNODE, EVFILT_WRITE, } } #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd"))] pub type type_of_event_flag = u16; #[cfg(any(target_os = "netbsd"))] pub type type_of_event_flag = u32; libc_bitflags!{ pub struct EventFlag: type_of_event_flag { EV_ADD; EV_CLEAR; EV_DELETE; EV_DISABLE; // No released version of OpenBSD supports EV_DISPATCH or EV_RECEIPT. // These have been commited to the -current branch though and are // expected to be part of the OpenBSD 6.2 release in Nov 2017. // See: https://marc.info/?l=openbsd-tech&m=149621427511219&w=2 // https://github.com/rust-lang/libc/pull/613 #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "netbsd"))] EV_DISPATCH; #[cfg(target_os = "freebsd")] EV_DROP; EV_ENABLE; EV_EOF; EV_ERROR; #[cfg(any(target_os = "macos", target_os = "ios"))] EV_FLAG0; EV_FLAG1; #[cfg(target_os = "dragonfly")] EV_NODATA; EV_ONESHOT; #[cfg(any(target_os = "macos", target_os = "ios"))] EV_OOBAND; #[cfg(any(target_os = "macos", target_os = "ios"))] EV_POLL; #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "netbsd"))] EV_RECEIPT; EV_SYSFLAGS; } } libc_bitflags!( pub struct FilterFlag: u32 { #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_ABSOLUTE; NOTE_ATTRIB; NOTE_CHILD; NOTE_DELETE; #[cfg(target_os = "openbsd")] NOTE_EOF; NOTE_EXEC; NOTE_EXIT; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_EXIT_REPARENTED; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_EXITSTATUS; NOTE_EXTEND; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFAND; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFCOPY; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFCTRLMASK; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFLAGSMASK; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFNOP; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFOR; NOTE_FORK; NOTE_LINK; NOTE_LOWAT; #[cfg(target_os = "freebsd")] NOTE_MSECONDS; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_NONE; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd"))] NOTE_NSECONDS; #[cfg(target_os = "dragonfly")] NOTE_OOB; NOTE_PCTRLMASK; NOTE_PDATAMASK; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_REAP; NOTE_RENAME; NOTE_REVOKE; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd"))] NOTE_SECONDS; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_SIGNAL; NOTE_TRACK; NOTE_TRACKERR; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_TRIGGER; #[cfg(target_os = "openbsd")] NOTE_TRUNCATE; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd"))] NOTE_USECONDS; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_VM_ERROR; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_VM_PRESSURE; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_VM_PRESSURE_SUDDEN_TERMINATE; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_VM_PRESSURE_TERMINATE; NOTE_WRITE; } ); pub fn kqueue() -> Result<RawFd> { let res = unsafe { libc::kqueue() }; Errno::result(res) } // KEvent can't derive Send because on some operating systems, udata is defined // as a void*. However, KEvent's public API always treats udata as an intptr_t, // which is safe to Send. unsafe impl Send for KEvent { } impl KEvent { pub fn new(ident: uintptr_t, filter: EventFilter, flags: EventFlag, fflags:FilterFlag, data: intptr_t, udata: intptr_t) -> KEvent { KEvent { kevent: libc::kevent { ident: ident, filter: filter as type_of_event_filter, flags: flags.bits(), fflags: fflags.bits(), data: data as type_of_data, udata: udata as type_of_udata } } } pub fn ident(&self) -> uintptr_t { self.kevent.ident } pub fn filter(&self) -> EventFilter { unsafe { mem::transmute(self.kevent.filter as type_of_event_filter) } } pub fn flags(&self) -> EventFlag { EventFlag::from_bits(self.kevent.flags).unwrap() } pub fn fflags(&self) -> FilterFlag { FilterFlag::from_bits(self.kevent.fflags).unwrap() } pub fn data(&self) -> intptr_t { self.kevent.data as intptr_t } pub fn udata(&self) -> intptr_t { self.kevent.udata as intptr_t } } pub fn kevent(kq: RawFd, changelist: &[KEvent], eventlist: &mut [KEvent], timeout_ms: usize) -> Result<usize> { // Convert ms to timespec let timeout = timespec { tv_sec: (timeout_ms / 1000) as time_t, tv_nsec: ((timeout_ms % 1000) * 1_000_000) as c_long }; kevent_ts(kq, changelist, eventlist, Some(timeout)) } #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly", target_os = "openbsd"))] type type_of_nchanges = c_int; #[cfg(target_os = "netbsd")] type type_of_nchanges = size_t; pub fn kevent_ts(kq: RawFd, changelist: &[KEvent], eventlist: &mut [KEvent], timeout_opt: Option<timespec>) -> Result<usize> { let res = unsafe { libc::kevent( kq, changelist.as_ptr() as *const libc::kevent, changelist.len() as type_of_nchanges, eventlist.as_mut_ptr() as *mut libc::kevent, eventlist.len() as type_of_nchanges, if let Some(ref timeout) = timeout_opt {timeout as *const timespec} else {ptr::null()}) }; Errno::result(res).map(|r| r as usize) }<|fim▁hole|> #[inline] pub fn ev_set(ev: &mut KEvent, ident: usize, filter: EventFilter, flags: EventFlag, fflags: FilterFlag, udata: intptr_t) { ev.kevent.ident = ident as uintptr_t; ev.kevent.filter = filter as type_of_event_filter; ev.kevent.flags = flags.bits(); ev.kevent.fflags = fflags.bits(); ev.kevent.data = 0; ev.kevent.udata = udata as type_of_udata; } #[test] fn test_struct_kevent() { let udata : intptr_t = 12345; let expected = libc::kevent{ident: 0xdeadbeef, filter: libc::EVFILT_READ, flags: libc::EV_ONESHOT | libc::EV_ADD, fflags: libc::NOTE_CHILD | libc::NOTE_EXIT, data: 0x1337, udata: udata as type_of_udata}; let actual = KEvent::new(0xdeadbeef, EventFilter::EVFILT_READ, EV_ONESHOT | EV_ADD, NOTE_CHILD | NOTE_EXIT, 0x1337, udata); assert!(expected.ident == actual.ident()); assert!(expected.filter == actual.filter() as type_of_event_filter); assert!(expected.flags == actual.flags().bits()); assert!(expected.fflags == actual.fflags().bits()); assert!(expected.data == actual.data() as type_of_data); assert!(expected.udata == actual.udata() as type_of_udata); assert!(mem::size_of::<libc::kevent>() == mem::size_of::<KEvent>()); }<|fim▁end|>
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2013 Agile Business Group sagl # (<http://www.agilebg.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': "Purchase orders - Force number", 'version': '0.1', 'category': 'Purchase Management', 'summary': "Force purchase orders numeration", 'description': """ This simple module allows to specify the number to use when creating purchase orders. If user does not change the default value ('/'), the standard sequence is used.""",<|fim▁hole|> 'author': "Agile Business Group,Odoo Community Association (OCA)", 'website': 'http://www.agilebg.com', 'license': 'AGPL-3', "depends": ['purchase'], "data": [ 'purchase_view.xml', ], "demo": [], "active": False, "installable": False }<|fim▁end|>
<|file_name|>qgsgeometry.cpp<|end_file_name|><|fim▁begin|>/*************************************************************************** qgsgeometry.cpp - Geometry (stored as Open Geospatial Consortium WKB) ------------------------------------------------------------------- Date : 02 May 2005 Copyright : (C) 2005 by Brendan Morley email : morb at ozemail dot com dot au *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include <limits> #include <cstdarg> #include <cstdio> #include <cmath> #include "qgis.h" #include "qgsgeometry.h" #include "qgsapplication.h" #include "qgslogger.h" #include "qgsmessagelog.h" #include "qgspoint.h" #include "qgsrectangle.h" #include "qgsmaplayerregistry.h" #include "qgsvectorlayer.h" #include "qgsproject.h" #include "qgsgeometryvalidator.h" #include <QDebug> #ifndef Q_OS_WIN #include <netinet/in.h> #else #include <winsock.h> #endif #define DEFAULT_QUADRANT_SEGMENTS 8 #define CATCH_GEOS(r) \ catch (GEOSException &e) \ { \ QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr("GEOS") ); \ return r; \ } class GEOSException { public: GEOSException( QString theMsg ) { if ( theMsg == "Unknown exception thrown" && lastMsg.isNull() ) { msg = theMsg; } else { msg = theMsg; lastMsg = msg; } } // copy constructor GEOSException( const GEOSException &rhs ) { *this = rhs; } ~GEOSException() { if ( lastMsg == msg ) lastMsg = QString::null; } QString what() { return msg; } private: QString msg; static QString lastMsg; }; QString GEOSException::lastMsg; static void throwGEOSException( const char *fmt, ... ) { va_list ap; char buffer[1024]; va_start( ap, fmt ); vsnprintf( buffer, sizeof buffer, fmt, ap ); va_end( ap ); qWarning() << QString( "GEOS exception: %1" ).arg( buffer ); throw GEOSException( QString::fromUtf8( buffer ) ); } static void printGEOSNotice( const char *fmt, ... ) { #if defined(QGISDEBUG) va_list ap; char buffer[1024]; va_start( ap, fmt ); vsnprintf( buffer, sizeof buffer, fmt, ap ); va_end( ap ); QgsDebugMsg( QString( "GEOS notice: %1" ).arg( QString::fromUtf8( buffer ) ) ); #else Q_UNUSED( fmt ); #endif } class GEOSInit { public: GEOSContextHandle_t ctxt; GEOSInit() { ctxt = initGEOS_r( printGEOSNotice, throwGEOSException ); } ~GEOSInit() { finishGEOS_r( ctxt ); } }; static GEOSInit geosinit; GEOSContextHandle_t QgsGeometry::getGEOSHandler() { return geosinit.ctxt; } QgsGeometry::QgsGeometry() : mGeometry( 0 ) , mGeometrySize( 0 ) , mGeos( 0 ) , mDirtyWkb( false ) , mDirtyGeos( false ) { } QgsGeometry::QgsGeometry( QgsGeometry const & rhs ) : mGeometry( 0 ) , mGeometrySize( rhs.mGeometrySize ) , mDirtyWkb( rhs.mDirtyWkb ) , mDirtyGeos( rhs.mDirtyGeos ) { if ( mGeometrySize && rhs.mGeometry ) { mGeometry = new unsigned char[mGeometrySize]; memcpy( mGeometry, rhs.mGeometry, mGeometrySize ); } // deep-copy the GEOS Geometry if appropriate if ( rhs.mGeos ) mGeos = GEOSGeom_clone_r( geosinit.ctxt, rhs.mGeos ); else mGeos = 0; } //! Destructor QgsGeometry::~QgsGeometry() { if ( mGeometry ) delete [] mGeometry; if ( mGeos ) GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); } static unsigned int getNumGeosPoints( const GEOSGeometry *geom ) { unsigned int n; const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, geom ); GEOSCoordSeq_getSize_r( geosinit.ctxt, cs, &n ); return n; } static GEOSGeometry *createGeosPoint( const double x, const double y ) { GEOSCoordSequence *coord = GEOSCoordSeq_create_r( geosinit.ctxt, 1, 2 ); GEOSCoordSeq_setX_r( geosinit.ctxt, coord, 0, x ); GEOSCoordSeq_setY_r( geosinit.ctxt, coord, 0, y ); return GEOSGeom_createPoint_r( geosinit.ctxt, coord ); } static GEOSGeometry *createGeosPoint( const QgsPoint &point ) { return createGeosPoint( point.x(), point.y() ); } static GEOSCoordSequence *createGeosCoordSequence( const QgsPolyline& points ) { GEOSCoordSequence *coord = 0; try { coord = GEOSCoordSeq_create_r( geosinit.ctxt, points.count(), 2 ); int i; for ( i = 0; i < points.count(); i++ ) { GEOSCoordSeq_setX_r( geosinit.ctxt, coord, i, points[i].x() ); GEOSCoordSeq_setY_r( geosinit.ctxt, coord, i, points[i].y() ); } return coord; } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); /*if ( coord ) GEOSCoordSeq_destroy( coord );*/ throw; } } static GEOSGeometry *createGeosCollection( int typeId, QVector<GEOSGeometry*> geoms ) { GEOSGeometry **geomarr = new GEOSGeometry*[ geoms.size()]; if ( !geomarr ) return 0; for ( int i = 0; i < geoms.size(); i++ ) geomarr[i] = geoms[i]; GEOSGeometry *geom = 0; try { geom = GEOSGeom_createCollection_r( geosinit.ctxt, typeId, geomarr, geoms.size() ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); } delete [] geomarr; return geom; } static GEOSGeometry *createGeosLineString( const QgsPolyline& polyline ) { GEOSCoordSequence *coord = 0; try { coord = createGeosCoordSequence( polyline ); return GEOSGeom_createLineString_r( geosinit.ctxt, coord ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); //MH: for strange reasons, geos3 crashes when removing the coordinate sequence //if ( coord ) //GEOSCoordSeq_destroy( coord ); return 0; } } static GEOSGeometry *createGeosLinearRing( const QgsPolyline& polyline ) { GEOSCoordSequence *coord = 0; if ( polyline.count() <= 2 ) return 0; try { if ( polyline[0] != polyline[polyline.size()-1] ) { // Ring not closed QgsPolyline closed( polyline ); closed << closed[0]; coord = createGeosCoordSequence( closed ); } else { // XXX [MD] this exception should not be silenced! // this is here just because maptopixel simplification can return invalid linear rings if ( polyline.count() == 3 ) //-> Avoid 'GEOS::IllegalArgumentException: Invalid number of points in LinearRing found 3 - must be 0 or >= 4' return 0; coord = createGeosCoordSequence( polyline ); } return GEOSGeom_createLinearRing_r( geosinit.ctxt, coord ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); /* as MH has noticed ^, this crashes geos if ( coord ) GEOSCoordSeq_destroy( coord );*/ return 0; } } static GEOSGeometry *createGeosPolygon( const QVector<GEOSGeometry*> &rings ) { GEOSGeometry *shell; if ( rings.size() == 0 ) { #if defined(GEOS_VERSION_MAJOR) && defined(GEOS_VERSION_MINOR) && \ ((GEOS_VERSION_MAJOR>3) || ((GEOS_VERSION_MAJOR==3) && (GEOS_VERSION_MINOR>=3))) return GEOSGeom_createEmptyPolygon_r( geosinit.ctxt ); #else shell = GEOSGeom_createLinearRing_r( geosinit.ctxt, GEOSCoordSeq_create_r( geosinit.ctxt, 0, 2 ) ); #endif } else { shell = rings[0]; } GEOSGeometry **holes = NULL; int nHoles = 0; if ( rings.size() > 1 ) { nHoles = rings.size() - 1; holes = new GEOSGeometry*[ nHoles ]; if ( !holes ) return 0; for ( int i = 0; i < nHoles; i++ ) holes[i] = rings[i+1]; } GEOSGeometry *geom = GEOSGeom_createPolygon_r( geosinit.ctxt, shell, holes, nHoles ); if ( holes ) delete [] holes; return geom; } static GEOSGeometry *createGeosPolygon( GEOSGeometry *shell ) { return createGeosPolygon( QVector<GEOSGeometry*>() << shell ); } static GEOSGeometry *createGeosPolygon( const QgsPolygon& polygon ) { if ( polygon.count() == 0 ) return 0; QVector<GEOSGeometry *> geoms; try { for ( int i = 0; i < polygon.count(); i++ ) { GEOSGeometry *ring = createGeosLinearRing( polygon[i] ); if ( !ring ) { // something went really wrong - exit for ( int j = 0; j < geoms.count(); j++ ) GEOSGeom_destroy_r( geosinit.ctxt, geoms[j] ); // XXX [MD] we just silently return here - but we shouldn't // this is just because maptopixel simplification can return invalid linear rings return 0; } geoms << ring; } return createGeosPolygon( geoms ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); for ( int i = 0; i < geoms.count(); i++ ) GEOSGeom_destroy_r( geosinit.ctxt, geoms[i] ); return 0; } } static QgsGeometry *fromGeosGeom( GEOSGeometry *geom ) { if ( !geom ) return 0; QgsGeometry *g = new QgsGeometry; g->fromGeos( geom ); return g; } QgsGeometry* QgsGeometry::fromWkt( QString wkt ) { try { GEOSWKTReader *reader = GEOSWKTReader_create_r( geosinit.ctxt ); QgsGeometry *g = fromGeosGeom( GEOSWKTReader_read_r( geosinit.ctxt, reader, wkt.toLocal8Bit().data() ) ); GEOSWKTReader_destroy_r( geosinit.ctxt, reader ); return g; } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); return 0; } } QgsGeometry* QgsGeometry::fromPoint( const QgsPoint& point ) { return fromGeosGeom( createGeosPoint( point ) ); } QgsGeometry* QgsGeometry::fromPolyline( const QgsPolyline& polyline ) { return fromGeosGeom( createGeosLineString( polyline ) ); } QgsGeometry* QgsGeometry::fromPolygon( const QgsPolygon& polygon ) { return fromGeosGeom( createGeosPolygon( polygon ) ); } QgsGeometry* QgsGeometry::fromMultiPoint( const QgsMultiPoint& multipoint ) { QVector<GEOSGeometry *> geoms; try { for ( int i = 0; i < multipoint.size(); ++i ) geoms << createGeosPoint( multipoint[i] ); return fromGeosGeom( createGeosCollection( GEOS_MULTIPOINT, geoms ) ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); for ( int i = 0; i < geoms.size(); ++i ) GEOSGeom_destroy_r( geosinit.ctxt, geoms[i] ); return 0; } } QgsGeometry* QgsGeometry::fromMultiPolyline( const QgsMultiPolyline& multiline ) { QVector<GEOSGeometry *> geoms; try { for ( int i = 0; i < multiline.count(); i++ ) geoms << createGeosLineString( multiline[i] ); return fromGeosGeom( createGeosCollection( GEOS_MULTILINESTRING, geoms ) ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); for ( int i = 0; i < geoms.count(); i++ ) GEOSGeom_destroy_r( geosinit.ctxt, geoms[i] ); return 0; } } QgsGeometry* QgsGeometry::fromMultiPolygon( const QgsMultiPolygon& multipoly ) { if ( multipoly.count() == 0 ) return 0; QVector<GEOSGeometry *> geoms; try { for ( int i = 0; i < multipoly.count(); i++ ) geoms << createGeosPolygon( multipoly[i] ); return fromGeosGeom( createGeosCollection( GEOS_MULTIPOLYGON, geoms ) ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); for ( int i = 0; i < geoms.count(); i++ ) GEOSGeom_destroy_r( geosinit.ctxt, geoms[i] ); return 0; } } QgsGeometry* QgsGeometry::fromRect( const QgsRectangle& rect ) { QgsPolyline ring; ring.append( QgsPoint( rect.xMinimum(), rect.yMinimum() ) ); ring.append( QgsPoint( rect.xMaximum(), rect.yMinimum() ) ); ring.append( QgsPoint( rect.xMaximum(), rect.yMaximum() ) ); ring.append( QgsPoint( rect.xMinimum(), rect.yMaximum() ) ); ring.append( QgsPoint( rect.xMinimum(), rect.yMinimum() ) ); QgsPolygon polygon; polygon.append( ring ); return fromPolygon( polygon ); } QgsGeometry *QgsGeometry::fromQPointF( const QPointF &point ) { return fromGeosGeom( createGeosPoint( point.x(), point.y() ) ); } QgsGeometry *QgsGeometry::fromQPolygonF( const QPolygonF &polygon ) { if ( polygon.isClosed() ) { return QgsGeometry::fromPolygon( createPolygonFromQPolygonF( polygon ) ); } else { return QgsGeometry::fromPolyline( createPolylineFromQPolygonF( polygon ) ); } } QgsPolygon QgsGeometry::createPolygonFromQPolygonF( const QPolygonF &polygon ) { QgsPolygon result; result << createPolylineFromQPolygonF( polygon ); return result; } QgsPolyline QgsGeometry::createPolylineFromQPolygonF( const QPolygonF &polygon ) { QgsPolyline result; QPolygonF::const_iterator it = polygon.constBegin(); for ( ; it != polygon.constEnd(); ++it ) { result.append( QgsPoint( *it ) ); } return result; } QgsGeometry & QgsGeometry::operator=( QgsGeometry const & rhs ) { if ( &rhs == this ) return *this; // remove old geometry if it exists if ( mGeometry ) { delete [] mGeometry; mGeometry = 0; } mGeometrySize = rhs.mGeometrySize; // deep-copy the GEOS Geometry if appropriate GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = rhs.mGeos ? GEOSGeom_clone_r( geosinit.ctxt, rhs.mGeos ) : 0; mDirtyGeos = rhs.mDirtyGeos; mDirtyWkb = rhs.mDirtyWkb; if ( mGeometrySize && rhs.mGeometry ) { mGeometry = new unsigned char[mGeometrySize]; memcpy( mGeometry, rhs.mGeometry, mGeometrySize ); } return *this; } // QgsGeometry::operator=( QgsGeometry const & rhs ) void QgsGeometry::fromWkb( unsigned char *wkb, size_t length ) { // delete any existing WKB geometry before assigning new one if ( mGeometry ) { delete [] mGeometry; mGeometry = 0; } if ( mGeos ) { GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = 0; } mGeometry = wkb; mGeometrySize = length; mDirtyWkb = false; mDirtyGeos = true; } const unsigned char *QgsGeometry::asWkb() const { if ( mDirtyWkb ) exportGeosToWkb(); return mGeometry; } size_t QgsGeometry::wkbSize() const { if ( mDirtyWkb ) exportGeosToWkb(); return mGeometrySize; } const GEOSGeometry* QgsGeometry::asGeos() const { if ( mDirtyGeos ) { if ( !exportWkbToGeos() ) { return 0; } } return mGeos; } QGis::WkbType QgsGeometry::wkbType() const { QgsConstWkbPtr wkbPtr( asWkb() + 1 ); // ensure that wkb representation exists if ( mGeometry && wkbSize() >= 5 ) { QGis::WkbType wkbType; wkbPtr >> wkbType; return wkbType; } else { return QGis::WKBUnknown; } } QGis::GeometryType QgsGeometry::type() const { if ( mDirtyWkb ) exportGeosToWkb(); switch ( wkbType() ) { case QGis::WKBPoint: case QGis::WKBPoint25D: case QGis::WKBMultiPoint: case QGis::WKBMultiPoint25D: return QGis::Point; case QGis::WKBLineString: case QGis::WKBLineString25D: case QGis::WKBMultiLineString: case QGis::WKBMultiLineString25D: return QGis::Line; case QGis::WKBPolygon: case QGis::WKBPolygon25D: case QGis::WKBMultiPolygon: case QGis::WKBMultiPolygon25D: return QGis::Polygon; default: return QGis::UnknownGeometry; } } bool QgsGeometry::isMultipart() const { if ( mDirtyWkb ) exportGeosToWkb(); return QGis::isMultiType( wkbType() ); } void QgsGeometry::fromGeos( GEOSGeometry *geos ) { // TODO - make this more heap-friendly if ( mGeos ) { GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = 0; } if ( mGeometry ) { delete [] mGeometry; mGeometry = 0; } mGeos = geos; mDirtyWkb = true; mDirtyGeos = false; } QgsPoint QgsGeometry::closestVertex( const QgsPoint& point, int& atVertex, int& beforeVertex, int& afterVertex, double& sqrDist ) const { // TODO: implement with GEOS if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return QgsPoint( 0, 0 ); } double actdist = std::numeric_limits<double>::max(); beforeVertex = -1; afterVertex = -1; QgsWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; QgsPoint p; bool hasZValue = false; int vertexnr = -1; switch ( wkbType ) { case QGis::WKBPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBPoint: { double x, y; wkbPtr >> x >> y; p.set( x, y ); actdist = point.sqrDist( x, y ); vertexnr = 0; break; } case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { int nPoints; wkbPtr >> nPoints; for ( int index = 0; index < nPoints; ++index ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); double dist = point.sqrDist( x, y ); if ( dist < actdist ) { p.set( x, y ); actdist = dist; vertexnr = index; beforeVertex = index - 1; afterVertex = index == nPoints - 1 ? -1 : index + 1; } } break; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { int nRings; wkbPtr >> nRings; for ( int index = 0, pointIndex = 0; index < nRings; ++index ) { int nPoints; wkbPtr >> nPoints; for ( int index2 = 0; index2 < nPoints; ++index2 ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); double dist = point.sqrDist( x, y ); if ( dist < actdist ) { p.set( x, y ); actdist = dist; vertexnr = pointIndex; // assign the rubberband indices if ( index2 == 0 ) { beforeVertex = pointIndex + ( nPoints - 2 ); afterVertex = pointIndex + 1; } else if ( index2 == nPoints - 1 ) { beforeVertex = pointIndex - 1; afterVertex = pointIndex - ( nPoints - 2 ); } else { beforeVertex = pointIndex - 1; afterVertex = pointIndex + 1; } } ++pointIndex; } } break; } case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { int nPoints; wkbPtr >> nPoints; for ( int index = 0; index < nPoints; ++index ) { wkbPtr += 1 + sizeof( int ); // skip endian and point type double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); double dist = point.sqrDist( x, y ); if ( dist < actdist ) { p.set( x, y ); actdist = dist; vertexnr = index; } } break; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; for ( int index = 0, pointIndex = 0; index < nLines; ++index ) { wkbPtr += 1 + sizeof( int ); int nPoints; wkbPtr >> nPoints; for ( int index2 = 0; index2 < nPoints; ++index2 ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); double dist = point.sqrDist( x, y ); if ( dist < actdist ) { p.set( x, y ); actdist = dist; vertexnr = pointIndex; if ( index2 == 0 )//assign the rubber band indices beforeVertex = -1; else beforeVertex = vertexnr - 1; if ( index2 == nPoints - 1 ) afterVertex = -1; else afterVertex = vertexnr + 1; } ++pointIndex; } } break; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { int nPolys; wkbPtr >> nPolys; for ( int index = 0, pointIndex = 0; index < nPolys; ++index ) { wkbPtr += 1 + sizeof( int ); //skip endian and polygon type int nRings; wkbPtr >> nRings; for ( int index2 = 0; index2 < nRings; ++index2 ) { int nPoints; wkbPtr >> nPoints; for ( int index3 = 0; index3 < nPoints; ++index3 ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); double dist = point.sqrDist( x, y ); if ( dist < actdist ) { p.set( x, y ); actdist = dist; vertexnr = pointIndex; //assign the rubber band indices if ( index3 == 0 ) { beforeVertex = pointIndex + ( nPoints - 2 ); afterVertex = pointIndex + 1; } else if ( index3 == nPoints - 1 ) { beforeVertex = pointIndex - 1; afterVertex = pointIndex - ( nPoints - 2 ); } else { beforeVertex = pointIndex - 1; afterVertex = pointIndex + 1; } } ++pointIndex; } } } break; } default: break; } sqrDist = actdist; atVertex = vertexnr; return p; } void QgsGeometry::adjacentVertices( int atVertex, int& beforeVertex, int& afterVertex ) const { // TODO: implement with GEOS if ( mDirtyWkb ) exportGeosToWkb(); beforeVertex = -1; afterVertex = -1; if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return; } if ( atVertex < 0 ) return; QGis::WkbType wkbType; bool hasZValue = false; QgsWkbPtr wkbPtr( mGeometry + 1 ); wkbPtr >> wkbType; switch ( wkbType ) { case QGis::WKBPoint: { // NOOP - Points do not have adjacent verticies break; } case QGis::WKBLineString25D: case QGis::WKBLineString: { int nPoints; wkbPtr >> nPoints; if ( atVertex >= nPoints ) return; const int index = atVertex; // assign the rubber band indices beforeVertex = index - 1; if ( index == nPoints - 1 ) afterVertex = -1; else afterVertex = index + 1; break; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { int nRings; wkbPtr >> nRings; for ( int index0 = 0, pointIndex = 0; index0 < nRings; ++index0 ) { int nPoints; wkbPtr >> nPoints; for ( int index1 = 0; index1 < nPoints; ++index1 ) { wkbPtr += ( hasZValue ? 3 : 2 ) * sizeof( double ); if ( pointIndex == atVertex ) { if ( index1 == 0 ) { beforeVertex = pointIndex + ( nPoints - 2 ); afterVertex = pointIndex + 1; } else if ( index1 == nPoints - 1 ) { beforeVertex = pointIndex - 1; afterVertex = pointIndex - ( nPoints - 2 ); } else { beforeVertex = pointIndex - 1; afterVertex = pointIndex + 1; } } ++pointIndex; } } break; } case QGis::WKBMultiPoint25D: case QGis::WKBMultiPoint: { // NOOP - Points do not have adjacent verticies break; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; for ( int index0 = 0, pointIndex = 0; index0 < nLines; ++index0 ) { wkbPtr += 1 + sizeof( int ); int nPoints; wkbPtr >> nPoints; for ( int index1 = 0; index1 < nPoints; ++index1 ) { wkbPtr += ( hasZValue ? 3 : 2 ) * sizeof( double ); if ( pointIndex == atVertex ) { // Found the vertex of the linestring we were looking for. if ( index1 == 0 ) beforeVertex = -1; else beforeVertex = pointIndex - 1; if ( index1 == nPoints - 1 ) afterVertex = -1; else afterVertex = pointIndex + 1; } ++pointIndex; } } break; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { int nPolys; wkbPtr >> nPolys; for ( int index0 = 0, pointIndex = 0; index0 < nPolys; ++index0 ) { wkbPtr += 1 + sizeof( int ); //skip endian and polygon type int nRings; wkbPtr >> nRings; for ( int index1 = 0; index1 < nRings; ++index1 ) { int nPoints; wkbPtr >> nPoints; for ( int index2 = 0; index2 < nPoints; ++index2 ) { wkbPtr += ( hasZValue ? 3 : 2 ) * sizeof( double ); if ( pointIndex == atVertex ) { // Found the vertex of the linear-ring of the polygon we were looking for. // assign the rubber band indices if ( index2 == 0 ) { beforeVertex = pointIndex + ( nPoints - 2 ); afterVertex = pointIndex + 1; } else if ( index2 == nPoints - 1 ) { beforeVertex = pointIndex - 1; afterVertex = pointIndex - ( nPoints - 2 ); } else { beforeVertex = pointIndex - 1; afterVertex = pointIndex + 1; } } ++pointIndex; } } } break; } default: break; } // switch (wkbType) } bool QgsGeometry::insertVertex( double x, double y, int beforeVertex, const GEOSCoordSequence *old_sequence, GEOSCoordSequence **new_sequence ) { // Bounds checking if ( beforeVertex < 0 ) { *new_sequence = 0; return false; } unsigned int numPoints; GEOSCoordSeq_getSize_r( geosinit.ctxt, old_sequence, &numPoints ); *new_sequence = GEOSCoordSeq_create_r( geosinit.ctxt, numPoints + 1, 2 ); if ( !*new_sequence ) return false; bool inserted = false; for ( unsigned int i = 0, j = 0; i < numPoints; i++, j++ ) { // Do we insert the new vertex here? if ( beforeVertex == static_cast<int>( i ) ) { GEOSCoordSeq_setX_r( geosinit.ctxt, *new_sequence, j, x ); GEOSCoordSeq_setY_r( geosinit.ctxt, *new_sequence, j, y ); j++; inserted = true; } double aX, aY; GEOSCoordSeq_getX_r( geosinit.ctxt, old_sequence, i, &aX ); GEOSCoordSeq_getY_r( geosinit.ctxt, old_sequence, i, &aY ); GEOSCoordSeq_setX_r( geosinit.ctxt, *new_sequence, j, aX ); GEOSCoordSeq_setY_r( geosinit.ctxt, *new_sequence, j, aY ); } if ( !inserted ) { // The beforeVertex is greater than the actual number of vertices // in the geometry - append it. GEOSCoordSeq_setX_r( geosinit.ctxt, *new_sequence, numPoints, x ); GEOSCoordSeq_setY_r( geosinit.ctxt, *new_sequence, numPoints, y ); } // TODO: Check that the sequence is still simple, e.g. with GEOS_GEOM::Geometry->isSimple() return inserted; } bool QgsGeometry::moveVertex( QgsWkbPtr &wkbPtr, const double &x, const double &y, int atVertex, bool hasZValue, int &pointIndex, bool isRing ) { int nPoints; wkbPtr >> nPoints; const int ps = ( hasZValue ? 3 : 2 ) * sizeof( double ); // Not this linestring/ring? if ( atVertex >= pointIndex + nPoints ) { wkbPtr += ps * nPoints; pointIndex += nPoints; return false; } if ( isRing && atVertex == pointIndex + nPoints - 1 ) atVertex = pointIndex; // Goto point in this linestring/ring wkbPtr += ps * ( atVertex - pointIndex ); wkbPtr << x << y; if ( hasZValue ) wkbPtr << 0.0; if ( isRing && atVertex == pointIndex ) { wkbPtr += ps * ( nPoints - 2 ); wkbPtr << x << y; if ( hasZValue ) wkbPtr << 0.0; } return true; } bool QgsGeometry::moveVertex( double x, double y, int atVertex ) { if ( atVertex < 0 ) return false; if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return false; } QGis::WkbType wkbType; bool hasZValue = false; QgsWkbPtr wkbPtr( mGeometry + 1 ); wkbPtr >> wkbType; switch ( wkbType ) { case QGis::WKBPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBPoint: { if ( atVertex != 0 ) return false; wkbPtr << x << y; mDirtyGeos = true; return true; } case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { int pointIndex = 0; if ( moveVertex( wkbPtr, x, y, atVertex, hasZValue, pointIndex, false ) ) { mDirtyGeos = true; return true; } return false; } case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { int nPoints; wkbPtr >> nPoints; if ( atVertex < nPoints ) { wkbPtr += atVertex * ( 1 + sizeof( int ) + ( hasZValue ? 3 : 2 ) * sizeof( double ) ) + 1 + sizeof( int ); wkbPtr << x << y; if ( hasZValue ) wkbPtr << 0.0; mDirtyGeos = true; return true; } else { return false; } } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; for ( int linenr = 0, pointIndex = 0; linenr < nLines; ++linenr ) { wkbPtr += 1 + sizeof( int ); if ( moveVertex( wkbPtr, x, y, atVertex, hasZValue, pointIndex, false ) ) { mDirtyGeos = true; return true; } } return false; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { int nLines; wkbPtr >> nLines; for ( int linenr = 0, pointIndex = 0; linenr < nLines; ++linenr ) { if ( moveVertex( wkbPtr, x, y, atVertex, hasZValue, pointIndex, true ) ) { mDirtyGeos = true; return true; } } return false; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { int nPolygons; wkbPtr >> nPolygons; for ( int polynr = 0, pointIndex = 0; polynr < nPolygons; ++polynr ) { wkbPtr += 1 + sizeof( int ); // skip endian and polygon type int nRings; wkbPtr >> nRings; for ( int ringnr = 0; ringnr < nRings; ++ringnr ) { if ( moveVertex( wkbPtr, x, y, atVertex, hasZValue, pointIndex, true ) ) { mDirtyGeos = true; return true; } } } return false; } default: return false; } } // copy vertices from srcPtr to dstPtr and skip/delete one vertex // @param srcPtr ring/part starting with number of points (adjusted in each call) // @param dstPtr ring/part to copy to (adjusted in each call) // @param atVertex index of vertex to skip // @param hasZValue points have 3 elements // @param pointIndex reference to index of first ring/part vertex in overall object (adjusted in each call) // @param isRing srcPtr points to a ring // @param lastItem last ring/part, atVertex after this one must be wrong // @return // 0 no delete was done // 1 "normal" delete was done // 2 last element of the ring/part was deleted int QgsGeometry::deleteVertex( QgsConstWkbPtr &srcPtr, QgsWkbPtr &dstPtr, int atVertex, bool hasZValue, int &pointIndex, bool isRing, bool lastItem ) { QgsDebugMsg( QString( "atVertex:%1 hasZValue:%2 pointIndex:%3 isRing:%4" ).arg( atVertex ).arg( hasZValue ).arg( pointIndex ).arg( isRing ) ); const int ps = ( hasZValue ? 3 : 2 ) * sizeof( double ); int nPoints; srcPtr >> nPoints; // copy complete ring/part if vertex is in a following one if ( atVertex < pointIndex || atVertex >= pointIndex + nPoints ) { // atVertex does not exist if ( lastItem && atVertex >= pointIndex + nPoints ) return 0; dstPtr << nPoints; int len = nPoints * ps; memcpy( dstPtr, srcPtr, len ); dstPtr += len; srcPtr += len; pointIndex += nPoints; return 0; } // delete the first vertex of a ring instead of the last if ( isRing && atVertex == pointIndex + nPoints - 1 ) atVertex = pointIndex; if ( nPoints == ( isRing ? 2 : 1 ) ) { // last point of the part/ring is deleted // skip the whole part/ring srcPtr += nPoints * ps; pointIndex += nPoints; return 2; } dstPtr << nPoints - 1; // copy ring before vertex int len = ( atVertex - pointIndex ) * ps; if ( len > 0 ) { memcpy( dstPtr, srcPtr, len ); dstPtr += len; srcPtr += len; } // skip deleted vertex srcPtr += ps; // copy reset of ring len = ( pointIndex + nPoints - atVertex - 1 ) * ps; // save position of vertex, if we delete the first vertex of a ring const unsigned char *first = 0; if ( isRing && atVertex == pointIndex ) { len -= ps; first = srcPtr; } if ( len > 0 ) { memcpy( dstPtr, srcPtr, len ); dstPtr += len; srcPtr += len; } // copy new first vertex instead of the old last, if we deleted the original first vertex if ( first ) { memcpy( dstPtr, first, ps ); dstPtr += ps; srcPtr += ps; } pointIndex += nPoints; return 1; } bool QgsGeometry::deleteVertex( int atVertex ) { QgsDebugMsg( QString( "atVertex:%1" ).arg( atVertex ) ); if ( atVertex < 0 ) return false; if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return false; } QgsConstWkbPtr srcPtr( mGeometry ); char endianness; QGis::WkbType wkbType; srcPtr >> endianness >> wkbType; bool hasZValue = QGis::wkbDimensions( wkbType ) == 3; int ps = ( hasZValue ? 3 : 2 ) * sizeof( double ); if ( QGis::flatType( wkbType ) == QGis::WKBMultiPoint ) ps += 1 + sizeof( int ); unsigned char *dstBuffer = new unsigned char[mGeometrySize - ps]; QgsWkbPtr dstPtr( dstBuffer ); dstPtr << endianness << wkbType; bool deleted = false; switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: break; //cannot remove the only point vertex case QGis::WKBLineString25D: case QGis::WKBLineString: { int pointIndex = 0; int res = deleteVertex( srcPtr, dstPtr, atVertex, hasZValue, pointIndex, false, true ); if ( res == 2 ) { // Linestring with 0 points dstPtr << 0; } deleted = res != 0; break; } case QGis::WKBPolygon25D: case QGis::WKBPolygon: { int nRings; srcPtr >> nRings; QgsWkbPtr ptrN( dstPtr ); dstPtr << nRings; for ( int ringnr = 0, pointIndex = 0; ringnr < nRings; ++ringnr ) { int res = deleteVertex( srcPtr, dstPtr, atVertex, hasZValue, pointIndex, true, ringnr == nRings - 1 ); if ( res == 2 ) ptrN << nRings - 1; deleted |= res != 0; } break; } case QGis::WKBMultiPoint25D: case QGis::WKBMultiPoint: { int nPoints; srcPtr >> nPoints; if ( atVertex < nPoints ) { dstPtr << nPoints - 1; int len = ps * atVertex; if ( len > 0 ) { memcpy( dstPtr, srcPtr, len ); srcPtr += len; dstPtr += len; } srcPtr += ps; len = ps * ( nPoints - atVertex - 1 ); if ( len > 0 ) { memcpy( dstPtr, srcPtr, len ); srcPtr += len; dstPtr += len; } deleted = true; } break; } case QGis::WKBMultiLineString25D: case QGis::WKBMultiLineString: { int nLines; srcPtr >> nLines; QgsWkbPtr ptrN( dstPtr ); dstPtr << nLines; for ( int linenr = 0, pointIndex = 0; linenr < nLines; ++linenr ) { QgsWkbPtr saveDstPtr( dstPtr ); srcPtr >> endianness >> wkbType; dstPtr << endianness << wkbType; int res = deleteVertex( srcPtr, dstPtr, atVertex, hasZValue, pointIndex, false, linenr == nLines - 1 ); if ( res == 2 ) { // line string was completely removed ptrN << nLines - 1; dstPtr = saveDstPtr; } deleted |= res != 0; } break; } case QGis::WKBMultiPolygon25D: case QGis::WKBMultiPolygon: { int nPolys; srcPtr >> nPolys; QgsWkbPtr ptrNPolys( dstPtr ); dstPtr << nPolys; for ( int polynr = 0, pointIndex = 0; polynr < nPolys; ++polynr ) { int nRings; srcPtr >> endianness >> wkbType >> nRings; QgsWkbPtr saveDstPolyPtr( dstPtr ); dstPtr << endianness << wkbType; QgsWkbPtr ptrNRings( dstPtr ); dstPtr << nRings; for ( int ringnr = 0; ringnr < nRings; ++ringnr ) { int res = deleteVertex( srcPtr, dstPtr, atVertex, hasZValue, pointIndex, true, polynr == nPolys - 1 && ringnr == nRings - 1 ); if ( res == 2 ) { // ring was completely removed if ( nRings == 1 ) { // last ring => remove polygon ptrNPolys << nPolys - 1; dstPtr = saveDstPolyPtr; } else { ptrNRings << nRings - 1; } } deleted |= res != 0; } } break; } case QGis::WKBNoGeometry: case QGis::WKBUnknown: break; } if ( deleted ) { delete [] mGeometry; mGeometry = dstBuffer; mGeometrySize -= ps; mDirtyGeos = true; return true; } else { delete [] dstBuffer; return false; }<|fim▁hole|>bool QgsGeometry::insertVertex( QgsConstWkbPtr &srcPtr, QgsWkbPtr &dstPtr, int beforeVertex, const double &x, const double &y, bool hasZValue, int &pointIndex, bool isRing ) { int nPoints; srcPtr >> nPoints; bool insertHere = beforeVertex >= pointIndex && beforeVertex < pointIndex + nPoints; int len; if ( insertHere ) { dstPtr << nPoints + 1; len = ( hasZValue ? 3 : 2 ) * ( beforeVertex - pointIndex ) * sizeof( double ); if ( len > 0 ) { memcpy( dstPtr, srcPtr, len ); srcPtr += len; dstPtr += len; } dstPtr << x << y; if ( hasZValue ) dstPtr << 0.0; len = ( hasZValue ? 3 : 2 ) * ( pointIndex + nPoints - beforeVertex ) * sizeof( double ); if ( isRing && beforeVertex == pointIndex ) len -= ( hasZValue ? 3 : 2 ) * sizeof( double ); } else { dstPtr << nPoints; len = ( hasZValue ? 3 : 2 ) * nPoints * sizeof( double ); } memcpy( dstPtr, srcPtr, len ); srcPtr += len; dstPtr += len; if ( isRing && beforeVertex == pointIndex ) { dstPtr << x << y; if ( hasZValue ) dstPtr << 0.0; } pointIndex += nPoints; return insertHere; } bool QgsGeometry::insertVertex( double x, double y, int beforeVertex ) { // TODO: implement with GEOS if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return false; } if ( beforeVertex < 0 ) return false; QgsConstWkbPtr srcPtr( mGeometry ); char endianness; QGis::WkbType wkbType; srcPtr >> endianness >> wkbType; bool hasZValue = QGis::wkbDimensions( wkbType ) == 3; int ps = ( hasZValue ? 3 : 2 ) * sizeof( double ); if ( QGis::flatType( wkbType ) == QGis::WKBMultiPoint ) ps += 1 + sizeof( int ); unsigned char *dstBuffer = new unsigned char[mGeometrySize + ps]; QgsWkbPtr dstPtr( dstBuffer ); dstPtr << endianness << wkbType; bool inserted = false; switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: //cannot insert a vertex before another one on point types break; case QGis::WKBLineString25D: case QGis::WKBLineString: { int pointIndex = 0; inserted = insertVertex( srcPtr, dstPtr, beforeVertex, x, y, hasZValue, pointIndex, false ); break; } case QGis::WKBPolygon25D: case QGis::WKBPolygon: { int nRings; srcPtr >> nRings; dstPtr << nRings; for ( int ringnr = 0, pointIndex = 0; ringnr < nRings; ++ringnr ) inserted |= insertVertex( srcPtr, dstPtr, beforeVertex, x, y, hasZValue, pointIndex, true ); break; } case QGis::WKBMultiPoint25D: case QGis::WKBMultiPoint: { int nPoints; srcPtr >> nPoints; if ( beforeVertex <= nPoints ) { dstPtr << nPoints + 1; int len = ps * beforeVertex; if ( len > 0 ) { memcpy( dstPtr, srcPtr, len ); srcPtr += len; dstPtr += len; } dstPtr << endianness << ( hasZValue ? QGis::WKBPoint25D : QGis::WKBPoint ) << x << y; if ( hasZValue ) dstPtr << 0.0; len = ps * ( nPoints - beforeVertex ); if ( len > 0 ) memcpy( dstPtr, srcPtr, len ); inserted = true; } break; } case QGis::WKBMultiLineString25D: case QGis::WKBMultiLineString: { int nLines; srcPtr >> nLines; dstPtr << nLines; for ( int linenr = 0, pointIndex = 0; linenr < nLines; ++linenr ) { srcPtr >> endianness >> wkbType; dstPtr << endianness << wkbType; inserted |= insertVertex( srcPtr, dstPtr, beforeVertex, x, y, hasZValue, pointIndex, false ); } break; } case QGis::WKBMultiPolygon25D: case QGis::WKBMultiPolygon: { int nPolys; srcPtr >> nPolys; dstPtr << nPolys; for ( int polynr = 0, pointIndex = 0; polynr < nPolys; ++polynr ) { int nRings; srcPtr >> endianness >> wkbType >> nRings; dstPtr << endianness << wkbType << nRings; for ( int ringnr = 0; ringnr < nRings; ++ringnr ) inserted |= insertVertex( srcPtr, dstPtr, beforeVertex, x, y, hasZValue, pointIndex, true ); } break; } case QGis::WKBNoGeometry: case QGis::WKBUnknown: break; } if ( inserted ) { delete [] mGeometry; mGeometry = dstBuffer; mGeometrySize += ps; mDirtyGeos = true; return true; } else { delete [] dstBuffer; return false; } } QgsPoint QgsGeometry::vertexAt( int atVertex ) const { if ( atVertex < 0 ) return QgsPoint( 0, 0 ); if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return QgsPoint( 0, 0 ); } QgsConstWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; bool hasZValue = false; switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: { if ( atVertex != 0 ) return QgsPoint( 0, 0 ); double x, y; wkbPtr >> x >> y; return QgsPoint( x, y ); } case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { // get number of points in the line int nPoints; wkbPtr >> nPoints; if ( atVertex >= nPoints ) return QgsPoint( 0, 0 ); // copy the vertex coordinates wkbPtr += atVertex * ( hasZValue ? 3 : 2 ) * sizeof( double ); double x, y; wkbPtr >> x >> y; return QgsPoint( x, y ); } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { int nRings; wkbPtr >> nRings; for ( int ringnr = 0, pointIndex = 0; ringnr < nRings; ++ringnr ) { int nPoints; wkbPtr >> nPoints; if ( atVertex >= pointIndex + nPoints ) { wkbPtr += nPoints * ( hasZValue ? 3 : 2 ) * sizeof( double ); pointIndex += nPoints; continue; } wkbPtr += ( atVertex - pointIndex ) * ( hasZValue ? 3 : 2 ) * sizeof( double ); double x, y; wkbPtr >> x >> y; return QgsPoint( x, y ); } return QgsPoint( 0, 0 ); } case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { // get number of points in the line int nPoints; wkbPtr >> nPoints; if ( atVertex >= nPoints ) return QgsPoint( 0, 0 ); wkbPtr += atVertex * ( 1 + sizeof( int ) + ( hasZValue ? 3 : 2 ) * sizeof( double ) ) + 1 + sizeof( int ); double x, y; wkbPtr >> x >> y; return QgsPoint( x, y ); } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; for ( int linenr = 0, pointIndex = 0; linenr < nLines; ++linenr ) { wkbPtr += 1 + sizeof( int ); int nPoints; wkbPtr >> nPoints; if ( atVertex >= pointIndex + nPoints ) { wkbPtr += nPoints * ( hasZValue ? 3 : 2 ) * sizeof( double ); pointIndex += nPoints; continue; } wkbPtr += ( atVertex - pointIndex ) * ( hasZValue ? 3 : 2 ) * sizeof( double ); double x, y; wkbPtr >> x >> y; return QgsPoint( x, y ); } return QgsPoint( 0, 0 ); } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { int nPolygons; wkbPtr >> nPolygons; for ( int polynr = 0, pointIndex = 0; polynr < nPolygons; ++polynr ) { wkbPtr += 1 + sizeof( int ); int nRings; wkbPtr >> nRings; for ( int ringnr = 0; ringnr < nRings; ++ringnr ) { int nPoints; wkbPtr >> nPoints; if ( atVertex >= pointIndex + nPoints ) { wkbPtr += nPoints * ( hasZValue ? 3 : 2 ) * sizeof( double ); pointIndex += nPoints; continue; } wkbPtr += ( atVertex - pointIndex ) * ( hasZValue ? 3 : 2 ) * sizeof( double ); double x, y; wkbPtr >> x >> y; return QgsPoint( x, y ); } } return QgsPoint( 0, 0 ); } default: QgsDebugMsg( "error: mGeometry type not recognized" ); return QgsPoint( 0, 0 ); } } double QgsGeometry::sqrDistToVertexAt( QgsPoint& point, int atVertex ) const { QgsPoint pnt = vertexAt( atVertex ); if ( pnt != QgsPoint( 0, 0 ) ) { QgsDebugMsg( "Exiting with distance to " + pnt.toString() ); return point.sqrDist( pnt ); } else { QgsDebugMsg( "Exiting with std::numeric_limits<double>::max()." ); // probably safest to bail out with a very large number return std::numeric_limits<double>::max(); } } double QgsGeometry::closestVertexWithContext( const QgsPoint& point, int& atVertex ) const { double sqrDist = std::numeric_limits<double>::max(); try { // Initialise some stuff int closestVertexIndex = 0; // set up the GEOS geometry if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return -1; const GEOSGeometry *g = GEOSGetExteriorRing_r( geosinit.ctxt, mGeos ); if ( !g ) return -1; const GEOSCoordSequence *sequence = GEOSGeom_getCoordSeq_r( geosinit.ctxt, g ); unsigned int n; GEOSCoordSeq_getSize_r( geosinit.ctxt, sequence, &n ); for ( unsigned int i = 0; i < n; i++ ) { double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, sequence, i, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, sequence, i, &y ); double testDist = point.sqrDist( x, y ); if ( testDist < sqrDist ) { closestVertexIndex = i; sqrDist = testDist; } } atVertex = closestVertexIndex; } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); return -1; } return sqrDist; } double QgsGeometry::closestSegmentWithContext( const QgsPoint& point, QgsPoint& minDistPoint, int& afterVertex, double *leftOf, double epsilon ) const { QgsDebugMsgLevel( "Entering.", 3 ); // TODO: implement with GEOS if ( mDirtyWkb ) //convert latest geos to mGeometry exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return -1; } QgsWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; // Initialise some stuff double sqrDist = std::numeric_limits<double>::max(); QgsPoint distPoint; int closestSegmentIndex = 0; bool hasZValue = false; switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: case QGis::WKBMultiPoint25D: case QGis::WKBMultiPoint: { // Points have no lines return -1; } case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { int nPoints; wkbPtr >> nPoints; double prevx = 0.0, prevy = 0.0; for ( int index = 0; index < nPoints; ++index ) { double thisx, thisy; wkbPtr >> thisx >> thisy; if ( hasZValue ) wkbPtr += sizeof( double ); if ( index > 0 ) { double testdist = point.sqrDistToSegment( prevx, prevy, thisx, thisy, distPoint, epsilon ); if ( testdist < sqrDist ) { closestSegmentIndex = index; sqrDist = testdist; minDistPoint = distPoint; if ( leftOf ) { *leftOf = QgsGeometry::leftOf( point.x(), point.y(), prevx, prevy, thisx, thisy ); } } } prevx = thisx; prevy = thisy; } afterVertex = closestSegmentIndex; break; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; for ( int linenr = 0, pointIndex = 0; linenr < nLines; ++linenr ) { wkbPtr += 1 + sizeof( int ); int nPoints; wkbPtr >> nPoints; double prevx = 0.0, prevy = 0.0; for ( int pointnr = 0; pointnr < nPoints; ++pointnr ) { double thisx, thisy; wkbPtr >> thisx >> thisy; if ( hasZValue ) wkbPtr += sizeof( double ); if ( pointnr > 0 ) { double testdist = point.sqrDistToSegment( prevx, prevy, thisx, thisy, distPoint, epsilon ); if ( testdist < sqrDist ) { closestSegmentIndex = pointIndex; sqrDist = testdist; minDistPoint = distPoint; if ( leftOf ) { *leftOf = QgsGeometry::leftOf( point.x(), point.y(), prevx, prevy, thisx, thisy ); } } } prevx = thisx; prevy = thisy; ++pointIndex; } } afterVertex = closestSegmentIndex; break; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { int nRings; wkbPtr >> nRings; for ( int ringnr = 0, pointIndex = 0; ringnr < nRings; ++ringnr )//loop over rings { int nPoints; wkbPtr >> nPoints; double prevx = 0.0, prevy = 0.0; for ( int pointnr = 0; pointnr < nPoints; ++pointnr )//loop over points in a ring { double thisx, thisy; wkbPtr >> thisx >> thisy; if ( hasZValue ) wkbPtr += sizeof( double ); if ( pointnr > 0 ) { double testdist = point.sqrDistToSegment( prevx, prevy, thisx, thisy, distPoint, epsilon ); if ( testdist < sqrDist ) { closestSegmentIndex = pointIndex; sqrDist = testdist; minDistPoint = distPoint; if ( leftOf ) { *leftOf = QgsGeometry::leftOf( point.x(), point.y(), prevx, prevy, thisx, thisy ); } } } prevx = thisx; prevy = thisy; ++pointIndex; } } afterVertex = closestSegmentIndex; break; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { int nPolygons; wkbPtr >> nPolygons; for ( int polynr = 0, pointIndex = 0; polynr < nPolygons; ++polynr ) { wkbPtr += 1 + sizeof( int ); int nRings; wkbPtr >> nRings; for ( int ringnr = 0; ringnr < nRings; ++ringnr ) { int nPoints; wkbPtr >> nPoints; double prevx = 0.0, prevy = 0.0; for ( int pointnr = 0; pointnr < nPoints; ++pointnr ) { double thisx, thisy; wkbPtr >> thisx >> thisy; if ( hasZValue ) wkbPtr += sizeof( double ); if ( pointnr > 0 ) { double testdist = point.sqrDistToSegment( prevx, prevy, thisx, thisy, distPoint, epsilon ); if ( testdist < sqrDist ) { closestSegmentIndex = pointIndex; sqrDist = testdist; minDistPoint = distPoint; if ( leftOf ) { *leftOf = QgsGeometry::leftOf( point.x(), point.y(), prevx, prevy, thisx, thisy ); } } } prevx = thisx; prevy = thisy; ++pointIndex; } } } afterVertex = closestSegmentIndex; break; } case QGis::WKBUnknown: default: return -1; break; } // switch (wkbType) QgsDebugMsgLevel( QString( "Exiting with nearest point %1, dist %2." ) .arg( point.toString() ).arg( sqrDist ), 3 ); return sqrDist; } int QgsGeometry::addRing( const QList<QgsPoint>& ring ) { //bail out if this geometry is not polygon/multipolygon if ( type() != QGis::Polygon ) return 1; //test for invalid geometries if ( ring.size() < 4 ) return 3; //ring must be closed if ( ring.first() != ring.last() ) return 2; //create geos geometry from wkb if not already there if ( mDirtyGeos ) { exportWkbToGeos(); } if ( !mGeos ) { return 6; } int type = GEOSGeomTypeId_r( geosinit.ctxt, mGeos ); //Fill GEOS Polygons of the feature into list QVector<const GEOSGeometry*> polygonList; if ( wkbType() == QGis::WKBPolygon ) { if ( type != GEOS_POLYGON ) return 1; polygonList << mGeos; } else if ( wkbType() == QGis::WKBMultiPolygon ) { if ( type != GEOS_MULTIPOLYGON ) return 1; for ( int i = 0; i < GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); ++i ) polygonList << GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ); } //create new ring GEOSGeometry *newRing = 0; GEOSGeometry *newRingPolygon = 0; try { newRing = createGeosLinearRing( ring.toVector() ); if ( !GEOSisValid_r( geosinit.ctxt, newRing ) ) { throwGEOSException( "ring is invalid" ); } newRingPolygon = createGeosPolygon( newRing ); if ( !GEOSisValid_r( geosinit.ctxt, newRingPolygon ) ) { throwGEOSException( "ring is invalid" ); } } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); if ( newRingPolygon ) GEOSGeom_destroy_r( geosinit.ctxt, newRingPolygon ); else if ( newRing ) GEOSGeom_destroy_r( geosinit.ctxt, newRing ); return 3; } QVector<GEOSGeometry*> rings; int i; for ( i = 0; i < polygonList.size(); i++ ) { for ( int j = 0; j < rings.size(); j++ ) GEOSGeom_destroy_r( geosinit.ctxt, rings[j] ); rings.clear(); GEOSGeometry *shellRing = 0; GEOSGeometry *shell = 0; try { shellRing = GEOSGeom_clone_r( geosinit.ctxt, GEOSGetExteriorRing_r( geosinit.ctxt, polygonList[i] ) ); shell = createGeosPolygon( shellRing ); if ( !GEOSWithin_r( geosinit.ctxt, newRingPolygon, shell ) ) { GEOSGeom_destroy_r( geosinit.ctxt, shell ); continue; } } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); if ( shell ) GEOSGeom_destroy_r( geosinit.ctxt, shell ); else if ( shellRing ) GEOSGeom_destroy_r( geosinit.ctxt, shellRing ); GEOSGeom_destroy_r( geosinit.ctxt, newRingPolygon ); return 4; } // add outer ring rings << GEOSGeom_clone_r( geosinit.ctxt, shellRing ); GEOSGeom_destroy_r( geosinit.ctxt, shell ); // check inner rings int n = GEOSGetNumInteriorRings_r( geosinit.ctxt, polygonList[i] ); int j; for ( j = 0; j < n; j++ ) { GEOSGeometry *holeRing = 0; GEOSGeometry *hole = 0; try { holeRing = GEOSGeom_clone_r( geosinit.ctxt, GEOSGetInteriorRingN_r( geosinit.ctxt, polygonList[i], j ) ); hole = createGeosPolygon( holeRing ); if ( !GEOSDisjoint_r( geosinit.ctxt, hole, newRingPolygon ) ) { GEOSGeom_destroy_r( geosinit.ctxt, hole ); break; } } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); if ( hole ) GEOSGeom_destroy_r( geosinit.ctxt, hole ); else if ( holeRing ) GEOSGeom_destroy_r( geosinit.ctxt, holeRing ); break; } rings << GEOSGeom_clone_r( geosinit.ctxt, holeRing ); GEOSGeom_destroy_r( geosinit.ctxt, hole ); } if ( j == n ) // this is it... break; } if ( i == polygonList.size() ) { // clear rings for ( int j = 0; j < rings.size(); j++ ) GEOSGeom_destroy_r( geosinit.ctxt, rings[j] ); rings.clear(); GEOSGeom_destroy_r( geosinit.ctxt, newRingPolygon ); // no containing polygon found return 5; } rings << GEOSGeom_clone_r( geosinit.ctxt, newRing ); GEOSGeom_destroy_r( geosinit.ctxt, newRingPolygon ); GEOSGeometry *newPolygon = createGeosPolygon( rings ); if ( wkbType() == QGis::WKBPolygon ) { GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = newPolygon; } else if ( wkbType() == QGis::WKBMultiPolygon ) { QVector<GEOSGeometry*> newPolygons; for ( int j = 0; j < polygonList.size(); j++ ) { newPolygons << ( i == j ? newPolygon : GEOSGeom_clone_r( geosinit.ctxt, polygonList[j] ) ); } GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = createGeosCollection( GEOS_MULTIPOLYGON, newPolygons ); } mDirtyWkb = true; mDirtyGeos = false; return 0; } int QgsGeometry::addPart( const QList<QgsPoint> &points, QGis::GeometryType geomType ) { if ( geomType == QGis::UnknownGeometry ) { geomType = type(); } switch ( geomType ) { case QGis::Point: // only one part at a time if ( points.size() != 1 ) { QgsDebugMsg( "expected 1 point: " + QString::number( points.size() ) ); return 2; } break; case QGis::Line: // line needs to have at least two points if ( points.size() < 2 ) { QgsDebugMsg( "line must at least have two points: " + QString::number( points.size() ) ); return 2; } break; case QGis::Polygon: // polygon needs to have at least three distinct points and must be closed if ( points.size() < 4 ) { QgsDebugMsg( "polygon must at least have three distinct points and must be closed: " + QString::number( points.size() ) ); return 2; } // Polygon must be closed if ( points.first() != points.last() ) { QgsDebugMsg( "polygon not closed" ); return 2; } break; default: QgsDebugMsg( "unsupported geometry type: " + QString::number( geomType ) ); return 2; } GEOSGeometry *newPart = 0; switch ( geomType ) { case QGis::Point: newPart = createGeosPoint( points[0] ); break; case QGis::Line: newPart = createGeosLineString( points.toVector() ); break; case QGis::Polygon: { //create new polygon from ring GEOSGeometry *newRing = 0; try { newRing = createGeosLinearRing( points.toVector() ); if ( !GEOSisValid_r( geosinit.ctxt, newRing ) ) throw GEOSException( "ring invalid" ); newPart = createGeosPolygon( newRing ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); if ( newRing ) GEOSGeom_destroy_r( geosinit.ctxt, newRing ); return 2; } } break; default: QgsDebugMsg( "unsupported type: " + QString::number( type() ) ); return 2; } if ( type() == QGis::UnknownGeometry ) { fromGeos( newPart ); return 0; } return addPart( newPart ); } int QgsGeometry::addPart( QgsGeometry *newPart ) { if ( !newPart ) return 4; const GEOSGeometry * geosPart = newPart->asGeos(); return addPart( GEOSGeom_clone_r( geosinit.ctxt, geosPart ) ); } int QgsGeometry::addPart( GEOSGeometry *newPart ) { QGis::GeometryType geomType = type(); if ( !isMultipart() && !convertToMultiType() ) { QgsDebugMsg( "could not convert to multipart" ); return 1; } //create geos geometry from wkb if not already there if ( mDirtyGeos ) { exportWkbToGeos(); } if ( !mGeos ) { QgsDebugMsg( "GEOS geometry not available!" ); return 4; } int geosType = GEOSGeomTypeId_r( geosinit.ctxt, mGeos ); Q_ASSERT( newPart ); try { if ( !GEOSisValid_r( geosinit.ctxt, newPart ) ) throw GEOSException( "new part geometry invalid" ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); if ( newPart ) GEOSGeom_destroy_r( geosinit.ctxt, newPart ); QgsDebugMsg( "part invalid: " + e.what() ); return 2; } QVector<GEOSGeometry*> parts; //create new multipolygon int n = GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); int i; for ( i = 0; i < n; ++i ) { const GEOSGeometry *partN = GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ); if ( geomType == QGis::Polygon && GEOSOverlaps_r( geosinit.ctxt, partN, newPart ) ) //bail out if new polygon overlaps with existing ones break; parts << GEOSGeom_clone_r( geosinit.ctxt, partN ); } if ( i < n ) { // bailed out for ( int i = 0; i < parts.size(); i++ ) GEOSGeom_destroy_r( geosinit.ctxt, parts[i] ); QgsDebugMsg( "new polygon part overlaps" ); return 3; } int nPartGeoms = GEOSGetNumGeometries_r( geosinit.ctxt, newPart ); for ( int i = 0; i < nPartGeoms; ++i ) { parts << GEOSGeom_clone_r( geosinit.ctxt, GEOSGetGeometryN_r( geosinit.ctxt, newPart, i ) ); } GEOSGeom_destroy_r( geosinit.ctxt, newPart ); GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = createGeosCollection( geosType, parts ); mDirtyWkb = true; mDirtyGeos = false; return 0; } int QgsGeometry::transform( const QTransform& t ) { if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return 1; } bool hasZValue = false; QgsWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: { transformVertex( wkbPtr, t, hasZValue ); } break; case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { int nPoints; wkbPtr >> nPoints; for ( int index = 0; index < nPoints; ++index ) transformVertex( wkbPtr, t, hasZValue ); break; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { int nRings; wkbPtr >> nRings; for ( int index = 0; index < nRings; ++index ) { int nPoints; wkbPtr >> nPoints; for ( int index2 = 0; index2 < nPoints; ++index2 ) transformVertex( wkbPtr, t, hasZValue ); } break; } case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { int nPoints; wkbPtr >> nPoints; for ( int index = 0; index < nPoints; ++index ) { wkbPtr += 1 + sizeof( int ); transformVertex( wkbPtr, t, hasZValue ); } break; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; for ( int index = 0; index < nLines; ++index ) { wkbPtr += 1 + sizeof( int ); int nPoints; wkbPtr >> nPoints; for ( int index2 = 0; index2 < nPoints; ++index2 ) transformVertex( wkbPtr, t, hasZValue ); } break; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { int nPolys; wkbPtr >> nPolys; for ( int index = 0; index < nPolys; ++index ) { wkbPtr += 1 + sizeof( int ); //skip endian and polygon type int nRings; wkbPtr >> nRings; for ( int index2 = 0; index2 < nRings; ++index2 ) { int nPoints; wkbPtr >> nPoints; for ( int index3 = 0; index3 < nPoints; ++index3 ) transformVertex( wkbPtr, t, hasZValue ); } } } default: break; } mDirtyGeos = true; return 0; } int QgsGeometry::translate( double dx, double dy ) { return transform( QTransform::fromTranslate( dx, dy ) ); } int QgsGeometry::rotate( double rotation, const QgsPoint& center ) { QTransform t = QTransform::fromTranslate( center.x(), center.y() ); t.rotate( -rotation ); t.translate( -center.x(), -center.y() ); return transform( t ); } int QgsGeometry::transform( const QgsCoordinateTransform& ct ) { if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return 1; } bool hasZValue = false; QgsWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: { transformVertex( wkbPtr, ct, hasZValue ); } break; case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { int nPoints; wkbPtr >> nPoints; for ( int index = 0; index < nPoints; ++index ) transformVertex( wkbPtr, ct, hasZValue ); break; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { int nRings; wkbPtr >> nRings; for ( int index = 0; index < nRings; ++index ) { int nPoints; wkbPtr >> nPoints; for ( int index2 = 0; index2 < nPoints; ++index2 ) transformVertex( wkbPtr, ct, hasZValue ); } break; } case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { int nPoints; wkbPtr >> nPoints; for ( int index = 0; index < nPoints; ++index ) { wkbPtr += 1 + sizeof( int ); transformVertex( wkbPtr, ct, hasZValue ); } break; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; for ( int index = 0; index < nLines; ++index ) { wkbPtr += 1 + sizeof( int ); int nPoints; wkbPtr >> nPoints; for ( int index2 = 0; index2 < nPoints; ++index2 ) transformVertex( wkbPtr, ct, hasZValue ); } break; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { int nPolys; wkbPtr >> nPolys; for ( int index = 0; index < nPolys; ++index ) { wkbPtr += 1 + sizeof( int ); //skip endian and polygon type int nRings; wkbPtr >> nRings; for ( int index2 = 0; index2 < nRings; ++index2 ) { int nPoints; wkbPtr >> nPoints; for ( int index3 = 0; index3 < nPoints; ++index3 ) transformVertex( wkbPtr, ct, hasZValue ); } } } default: break; } mDirtyGeos = true; return 0; } int QgsGeometry::splitGeometry( const QList<QgsPoint>& splitLine, QList<QgsGeometry*>& newGeometries, bool topological, QList<QgsPoint> &topologyTestPoints ) { int returnCode = 0; //return if this type is point/multipoint if ( type() == QGis::Point ) { return 1; //cannot split points } //make sure, mGeos and mWkb are there and up-to-date if ( mDirtyWkb ) exportGeosToWkb(); if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 1; if ( !GEOSisValid_r( geosinit.ctxt, mGeos ) ) return 7; //make sure splitLine is valid if (( type() == QGis::Line && splitLine.size() < 1 ) || ( type() == QGis::Polygon && splitLine.size() < 2 ) ) return 1; newGeometries.clear(); try { GEOSGeometry* splitLineGeos; if ( splitLine.size() > 1 ) { splitLineGeos = createGeosLineString( splitLine.toVector() ); } else if ( splitLine.size() == 1 ) { splitLineGeos = createGeosPoint( splitLine.at( 0 ) ); } else { return 1; } if ( !GEOSisValid_r( geosinit.ctxt, splitLineGeos ) || !GEOSisSimple_r( geosinit.ctxt, splitLineGeos ) ) { GEOSGeom_destroy_r( geosinit.ctxt, splitLineGeos ); return 1; } if ( topological ) { //find out candidate points for topological corrections if ( topologicalTestPointsSplit( splitLineGeos, topologyTestPoints ) != 0 ) return 1; } //call split function depending on geometry type if ( type() == QGis::Line ) { returnCode = splitLinearGeometry( splitLineGeos, newGeometries ); GEOSGeom_destroy_r( geosinit.ctxt, splitLineGeos ); } else if ( type() == QGis::Polygon ) { returnCode = splitPolygonGeometry( splitLineGeos, newGeometries ); GEOSGeom_destroy_r( geosinit.ctxt, splitLineGeos ); } else { return 1; } } CATCH_GEOS( 2 ) return returnCode; } /**Replaces a part of this geometry with another line*/ int QgsGeometry::reshapeGeometry( const QList<QgsPoint>& reshapeWithLine ) { if ( reshapeWithLine.size() < 2 ) return 1; if ( type() == QGis::Point ) return 1; //cannot reshape points GEOSGeometry* reshapeLineGeos = createGeosLineString( reshapeWithLine.toVector() ); //make sure this geos geometry is up-to-date if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 1; //single or multi? int numGeoms = GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); if ( numGeoms == -1 ) return 1; bool isMultiGeom = false; int geosTypeId = GEOSGeomTypeId_r( geosinit.ctxt, mGeos ); if ( geosTypeId == GEOS_MULTILINESTRING || geosTypeId == GEOS_MULTIPOLYGON ) isMultiGeom = true; bool isLine = ( type() == QGis::Line ); //polygon or multipolygon? if ( !isMultiGeom ) { GEOSGeometry* reshapedGeometry; if ( isLine ) reshapedGeometry = reshapeLine( mGeos, reshapeLineGeos ); else reshapedGeometry = reshapePolygon( mGeos, reshapeLineGeos ); GEOSGeom_destroy_r( geosinit.ctxt, reshapeLineGeos ); if ( reshapedGeometry ) { GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = reshapedGeometry; mDirtyWkb = true; return 0; } else { return 1; } } else { //call reshape for each geometry part and replace mGeos with new geometry if reshape took place bool reshapeTookPlace = false; GEOSGeometry* currentReshapeGeometry = 0; GEOSGeometry** newGeoms = new GEOSGeometry*[numGeoms]; for ( int i = 0; i < numGeoms; ++i ) { if ( isLine ) currentReshapeGeometry = reshapeLine( GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ), reshapeLineGeos ); else currentReshapeGeometry = reshapePolygon( GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ), reshapeLineGeos ); if ( currentReshapeGeometry ) { newGeoms[i] = currentReshapeGeometry; reshapeTookPlace = true; } else { newGeoms[i] = GEOSGeom_clone_r( geosinit.ctxt, GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ) ); } } GEOSGeom_destroy_r( geosinit.ctxt, reshapeLineGeos ); GEOSGeometry* newMultiGeom = 0; if ( isLine ) { newMultiGeom = GEOSGeom_createCollection_r( geosinit.ctxt, GEOS_MULTILINESTRING, newGeoms, numGeoms ); } else //multipolygon { newMultiGeom = GEOSGeom_createCollection_r( geosinit.ctxt, GEOS_MULTIPOLYGON, newGeoms, numGeoms ); } delete[] newGeoms; if ( !newMultiGeom ) return 3; if ( reshapeTookPlace ) { GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = newMultiGeom; mDirtyWkb = true; return 0; } else { GEOSGeom_destroy_r( geosinit.ctxt, newMultiGeom ); return 1; } } } int QgsGeometry::makeDifference( QgsGeometry* other ) { //make sure geos geometry is up to date if ( !other ) return 1; if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 1; if ( !GEOSisValid_r( geosinit.ctxt, mGeos ) ) return 2; if ( !GEOSisSimple_r( geosinit.ctxt, mGeos ) ) return 3; //convert other geometry to geos if ( other->mDirtyGeos ) other->exportWkbToGeos(); if ( !other->mGeos ) return 4; //make geometry::difference try { if ( GEOSIntersects_r( geosinit.ctxt, mGeos, other->mGeos ) ) { //check if multitype before and after bool multiType = isMultipart(); mGeos = GEOSDifference_r( geosinit.ctxt, mGeos, other->mGeos ); mDirtyWkb = true; if ( multiType && !isMultipart() ) { convertToMultiType(); exportWkbToGeos(); } } else { return 0; //nothing to do } } CATCH_GEOS( 5 ) if ( !mGeos ) { mDirtyGeos = true; return 6; } return 0; } QgsRectangle QgsGeometry::boundingBox() const { double xmin = std::numeric_limits<double>::max(); double ymin = std::numeric_limits<double>::max(); double xmax = -std::numeric_limits<double>::max(); double ymax = -std::numeric_limits<double>::max(); // TODO: implement with GEOS if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); // Return minimal QgsRectangle QgsRectangle invalidRect; invalidRect.setMinimal(); return invalidRect; } bool hasZValue = false; QgsWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; // consider endian when fetching feature type switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: { double x, y; wkbPtr >> x >> y; if ( x < xmin ) xmin = x; if ( x > xmax ) xmax = x; if ( y < ymin ) ymin = y; if ( y > ymax ) ymax = y; } break; case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; idx++ ) { wkbPtr += 1 + sizeof( int ); double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); if ( x < xmin ) xmin = x; if ( x > xmax ) xmax = x; if ( y < ymin ) ymin = y; if ( y > ymax ) ymax = y; } break; } case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { // get number of points in the line int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; idx++ ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); if ( x < xmin ) xmin = x; if ( x > xmax ) xmax = x; if ( y < ymin ) ymin = y; if ( y > ymax ) ymax = y; } break; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; for ( int jdx = 0; jdx < nLines; jdx++ ) { // each of these is a wbklinestring so must handle as such wkbPtr += 1 + sizeof( int ); // skip type since we know its 2 int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; idx++ ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); if ( x < xmin ) xmin = x; if ( x > xmax ) xmax = x; if ( y < ymin ) ymin = y; if ( y > ymax ) ymax = y; } } break; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { // get number of rings in the polygon int nRings; wkbPtr >> nRings; for ( int idx = 0; idx < nRings; idx++ ) { // get number of points in the ring int nPoints; wkbPtr >> nPoints; for ( int jdx = 0; jdx < nPoints; jdx++ ) { // add points to a point array for drawing the polygon double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); if ( x < xmin ) xmin = x; if ( x > xmax ) xmax = x; if ( y < ymin ) ymin = y; if ( y > ymax ) ymax = y; } } break; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { // get the number of polygons int nPolygons; wkbPtr >> nPolygons; for ( int kdx = 0; kdx < nPolygons; kdx++ ) { //skip the endian and mGeometry type info and // get number of rings in the polygon wkbPtr += 1 + sizeof( int ); int nRings; wkbPtr >> nRings; for ( int idx = 0; idx < nRings; idx++ ) { // get number of points in the ring int nPoints; wkbPtr >> nPoints; for ( int jdx = 0; jdx < nPoints; jdx++ ) { // add points to a point array for drawing the polygon double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); if ( x < xmin ) xmin = x; if ( x > xmax ) xmax = x; if ( y < ymin ) ymin = y; if ( y > ymax ) ymax = y; } } } break; } default: QgsDebugMsg( QString( "Unknown WkbType %1 ENCOUNTERED" ).arg( wkbType ) ); return QgsRectangle( 0, 0, 0, 0 ); break; } return QgsRectangle( xmin, ymin, xmax, ymax ); } bool QgsGeometry::intersects( const QgsRectangle& r ) const { QgsGeometry* g = fromRect( r ); bool res = intersects( g ); delete g; return res; } bool QgsGeometry::intersects( const QgsGeometry* geometry ) const { if ( !geometry ) return false; try // geos might throw exception on error { // ensure that both geometries have geos geometry exportWkbToGeos(); geometry->exportWkbToGeos(); if ( !mGeos || !geometry->mGeos ) { QgsDebugMsg( "GEOS geometry not available!" ); return false; } return GEOSIntersects_r( geosinit.ctxt, mGeos, geometry->mGeos ); } CATCH_GEOS( false ) } bool QgsGeometry::contains( const QgsPoint* p ) const { exportWkbToGeos(); if ( !p ) { QgsDebugMsg( "pointer p is 0" ); return false; } if ( !mGeos ) { QgsDebugMsg( "GEOS geometry not available!" ); return false; } GEOSGeometry *geosPoint = 0; bool returnval = false; try { geosPoint = createGeosPoint( *p ); returnval = GEOSContains_r( geosinit.ctxt, mGeos, geosPoint ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); returnval = false; } if ( geosPoint ) GEOSGeom_destroy_r( geosinit.ctxt, geosPoint ); return returnval; } bool QgsGeometry::geosRelOp( char( *op )( GEOSContextHandle_t handle, const GEOSGeometry*, const GEOSGeometry * ), const QgsGeometry *a, const QgsGeometry *b ) { if ( !a || !b ) return false; try // geos might throw exception on error { // ensure that both geometries have geos geometry a->exportWkbToGeos(); b->exportWkbToGeos(); if ( !a->mGeos || !b->mGeos ) { QgsDebugMsg( "GEOS geometry not available!" ); return false; } return op( geosinit.ctxt, a->mGeos, b->mGeos ); } CATCH_GEOS( false ) } bool QgsGeometry::contains( const QgsGeometry* geometry ) const { return geosRelOp( GEOSContains_r, this, geometry ); } bool QgsGeometry::disjoint( const QgsGeometry* geometry ) const { return geosRelOp( GEOSDisjoint_r, this, geometry ); } bool QgsGeometry::equals( const QgsGeometry* geometry ) const { return geosRelOp( GEOSEquals_r, this, geometry ); } bool QgsGeometry::touches( const QgsGeometry* geometry ) const { return geosRelOp( GEOSTouches_r, this, geometry ); } bool QgsGeometry::overlaps( const QgsGeometry* geometry ) const { return geosRelOp( GEOSOverlaps_r, this, geometry ); } bool QgsGeometry::within( const QgsGeometry* geometry ) const { return geosRelOp( GEOSWithin_r, this, geometry ); } bool QgsGeometry::crosses( const QgsGeometry* geometry ) const { return geosRelOp( GEOSCrosses_r, this, geometry ); } QString QgsGeometry::exportToWkt( const int &precision ) const { QgsDebugMsg( "entered." ); // TODO: implement with GEOS if ( mDirtyWkb ) { exportGeosToWkb(); } if ( !mGeometry || wkbSize() < 5 ) { QgsDebugMsg( "WKB geometry not available or too short!" ); return QString::null; } bool hasZValue = false; QgsWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; QString wkt; switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: { double x, y; wkbPtr >> x >> y; wkt += "POINT(" + qgsDoubleToString( x, precision ) + " " + qgsDoubleToString( y, precision ) + ")"; return wkt; } case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { int nPoints; wkbPtr >> nPoints; wkt += "LINESTRING("; // get number of points in the line for ( int idx = 0; idx < nPoints; ++idx ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); if ( idx != 0 ) wkt += ", "; wkt += qgsDoubleToString( x, precision ) + " " + qgsDoubleToString( y, precision ); } wkt += ")"; return wkt; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { wkt += "POLYGON("; // get number of rings in the polygon int nRings; wkbPtr >> nRings; if ( nRings == 0 ) // sanity check for zero rings in polygon return QString(); for ( int idx = 0; idx < nRings; idx++ ) { if ( idx != 0 ) wkt += ","; wkt += "("; // get number of points in the ring int nPoints; wkbPtr >> nPoints; for ( int jdx = 0; jdx < nPoints; jdx++ ) { if ( jdx != 0 ) wkt += ","; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += qgsDoubleToString( x, precision ) + " " + qgsDoubleToString( y, precision ); } wkt += ")"; } wkt += ")"; return wkt; } case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { int nPoints; wkbPtr >> nPoints; wkt += "MULTIPOINT("; for ( int idx = 0; idx < nPoints; ++idx ) { wkbPtr += 1 + sizeof( int ); if ( idx != 0 ) wkt += ", "; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += qgsDoubleToString( x, precision ) + " " + qgsDoubleToString( y, precision ); } wkt += ")"; return wkt; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { int nLines; wkbPtr >> nLines; wkt += "MULTILINESTRING("; for ( int jdx = 0; jdx < nLines; jdx++ ) { if ( jdx != 0 ) wkt += ", "; wkt += "("; wkbPtr += 1 + sizeof( int ); // skip type since we know its 2 int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; idx++ ) { if ( idx != 0 ) wkt += ", "; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += qgsDoubleToString( x, precision ) + " " + qgsDoubleToString( y, precision ); } wkt += ")"; } wkt += ")"; return wkt; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { int nPolygons; wkbPtr >> nPolygons; wkt += "MULTIPOLYGON("; for ( int kdx = 0; kdx < nPolygons; kdx++ ) { if ( kdx != 0 ) wkt += ","; wkt += "("; wkbPtr += 1 + sizeof( int ); int nRings; wkbPtr >> nRings; for ( int idx = 0; idx < nRings; idx++ ) { if ( idx != 0 ) wkt += ","; wkt += "("; int nPoints; wkbPtr >> nPoints; for ( int jdx = 0; jdx < nPoints; jdx++ ) { if ( jdx != 0 ) wkt += ","; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += qgsDoubleToString( x, precision ) + " " + qgsDoubleToString( y, precision ); } wkt += ")"; } wkt += ")"; } wkt += ")"; return wkt; } default: QgsDebugMsg( "error: mGeometry type not recognized" ); return QString::null; } } QString QgsGeometry::exportToGeoJSON( const int &precision ) const { QgsDebugMsg( "entered." ); // TODO: implement with GEOS if ( mDirtyWkb ) exportGeosToWkb(); if ( !mGeometry ) { QgsDebugMsg( "WKB geometry not available!" ); return QString::null; } QgsWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; bool hasZValue = false; QString wkt; switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: { double x, y; wkbPtr >> x >> y; wkt += "{ \"type\": \"Point\", \"coordinates\": [" + qgsDoubleToString( x, precision ) + ", " + qgsDoubleToString( y, precision ) + "] }"; return wkt; } case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { wkt += "{ \"type\": \"LineString\", \"coordinates\": [ "; // get number of points in the line int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; ++idx ) { if ( idx != 0 ) wkt += ", "; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += "[" + qgsDoubleToString( x, precision ) + ", " + qgsDoubleToString( y, precision ) + "]"; } wkt += " ] }"; return wkt; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { wkt += "{ \"type\": \"Polygon\", \"coordinates\": [ "; // get number of rings in the polygon int nRings; wkbPtr >> nRings; if ( nRings == 0 ) // sanity check for zero rings in polygon return QString(); for ( int idx = 0; idx < nRings; idx++ ) { if ( idx != 0 ) wkt += ", "; wkt += "[ "; // get number of points in the ring int nPoints; wkbPtr >> nPoints; for ( int jdx = 0; jdx < nPoints; jdx++ ) { if ( jdx != 0 ) wkt += ", "; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += "[" + qgsDoubleToString( x, precision ) + ", " + qgsDoubleToString( y, precision ) + "]"; } wkt += " ]"; } wkt += " ] }"; return wkt; } case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { wkt += "{ \"type\": \"MultiPoint\", \"coordinates\": [ "; int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; ++idx ) { wkbPtr += 1 + sizeof( int ); if ( idx != 0 ) wkt += ", "; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += "[" + qgsDoubleToString( x, precision ) + ", " + qgsDoubleToString( y, precision ) + "]"; } wkt += " ] }"; return wkt; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { wkt += "{ \"type\": \"MultiLineString\", \"coordinates\": [ "; int nLines; wkbPtr >> nLines; for ( int jdx = 0; jdx < nLines; jdx++ ) { if ( jdx != 0 ) wkt += ", "; wkt += "[ "; wkbPtr += 1 + sizeof( int ); // skip type since we know its 2 int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; idx++ ) { if ( idx != 0 ) wkt += ", "; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += "[" + qgsDoubleToString( x, precision ) + ", " + qgsDoubleToString( y, precision ) + "]"; } wkt += " ]"; } wkt += " ] }"; return wkt; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { wkt += "{ \"type\": \"MultiPolygon\", \"coordinates\": [ "; int nPolygons; wkbPtr >> nPolygons; for ( int kdx = 0; kdx < nPolygons; kdx++ ) { if ( kdx != 0 ) wkt += ", "; wkt += "[ "; wkbPtr += 1 + sizeof( int ); int nRings; wkbPtr >> nRings; for ( int idx = 0; idx < nRings; idx++ ) { if ( idx != 0 ) wkt += ", "; wkt += "[ "; int nPoints; wkbPtr >> nPoints; for ( int jdx = 0; jdx < nPoints; jdx++ ) { if ( jdx != 0 ) wkt += ", "; double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); wkt += "[" + qgsDoubleToString( x, precision ) + ", " + qgsDoubleToString( y, precision ) + "]"; } wkt += " ]"; } wkt += " ]"; } wkt += " ] }"; return wkt; } default: QgsDebugMsg( "error: mGeometry type not recognized" ); return QString::null; } } bool QgsGeometry::exportWkbToGeos() const { QgsDebugMsgLevel( "entered.", 3 ); if ( !mDirtyGeos ) { // No need to convert again return true; } if ( mGeos ) { GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = 0; } // this probably shouldn't return true if ( !mGeometry ) { // no WKB => no GEOS mDirtyGeos = false; return true; } bool hasZValue = false; QgsWkbPtr wkbPtr( mGeometry + 1 ); QGis::WkbType wkbType; wkbPtr >> wkbType; try { switch ( wkbType ) { case QGis::WKBPoint25D: case QGis::WKBPoint: { double x, y; wkbPtr >> x >> y; mGeos = createGeosPoint( QgsPoint( x, y ) ); mDirtyGeos = false; break; } case QGis::WKBMultiPoint25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPoint: { QVector<GEOSGeometry *> points; int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; idx++ ) { double x, y; wkbPtr += 1 + sizeof( int ); wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); points << createGeosPoint( QgsPoint( x, y ) ); } mGeos = createGeosCollection( GEOS_MULTIPOINT, points ); mDirtyGeos = false; break; } case QGis::WKBLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBLineString: { QgsPolyline sequence; int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; idx++ ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); sequence << QgsPoint( x, y ); } mDirtyGeos = false; mGeos = createGeosLineString( sequence ); break; } case QGis::WKBMultiLineString25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiLineString: { QVector<GEOSGeometry*> lines; int nLines; wkbPtr >> nLines; for ( int jdx = 0; jdx < nLines; jdx++ ) { QgsPolyline sequence; // each of these is a wbklinestring so must handle as such wkbPtr += 1 + sizeof( int ); // skip type since we know its 2 int nPoints; wkbPtr >> nPoints; for ( int idx = 0; idx < nPoints; idx++ ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); sequence << QgsPoint( x, y ); } // ignore invalid parts, it can come from ST_Simplify operations if ( sequence.count() > 1 ) lines << createGeosLineString( sequence ); } mGeos = createGeosCollection( GEOS_MULTILINESTRING, lines ); mDirtyGeos = false; break; } case QGis::WKBPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBPolygon: { // get number of rings in the polygon int nRings; wkbPtr >> nRings; QVector<GEOSGeometry*> rings; for ( int idx = 0; idx < nRings; idx++ ) { //QgsDebugMsg("Ring nr: "+QString::number(idx)); QgsPolyline sequence; // get number of points in the ring int nPoints; wkbPtr >> nPoints; for ( int jdx = 0; jdx < nPoints; jdx++ ) { // add points to a point array for drawing the polygon double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); sequence << QgsPoint( x, y ); } GEOSGeometry *ring = createGeosLinearRing( sequence ); if ( ring ) rings << ring; } mGeos = createGeosPolygon( rings ); mDirtyGeos = false; break; } case QGis::WKBMultiPolygon25D: hasZValue = true; //intentional fall-through case QGis::WKBMultiPolygon: { QVector<GEOSGeometry*> polygons; // get the number of polygons int nPolygons; wkbPtr >> nPolygons; for ( int kdx = 0; kdx < nPolygons; kdx++ ) { //QgsDebugMsg("Polygon nr: "+QString::number(kdx)); QVector<GEOSGeometry*> rings; //skip the endian and mGeometry type info and // get number of rings in the polygon wkbPtr += 1 + sizeof( int ); int numRings; wkbPtr >> numRings; for ( int idx = 0; idx < numRings; idx++ ) { //QgsDebugMsg("Ring nr: "+QString::number(idx)); QgsPolyline sequence; // get number of points in the ring int nPoints; wkbPtr >> nPoints; for ( int jdx = 0; jdx < nPoints; jdx++ ) { // add points to a point array for drawing the polygon double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); sequence << QgsPoint( x, y ); } GEOSGeometry *ring = createGeosLinearRing( sequence ); if ( ring ) rings << ring; } GEOSGeometry *polygon = createGeosPolygon( rings ); if ( polygon ) polygons << polygon; } mGeos = createGeosCollection( GEOS_MULTIPOLYGON, polygons ); mDirtyGeos = false; break; } default: return false; } } CATCH_GEOS( false ) return true; } bool QgsGeometry::exportGeosToWkb() const { //QgsDebugMsg("entered."); if ( !mDirtyWkb ) { // No need to convert again return true; } // clear the WKB, ready to replace with the new one if ( mGeometry ) { delete [] mGeometry; mGeometry = 0; } if ( !mGeos ) { // GEOS is null, therefore WKB is null. mDirtyWkb = false; return true; } // set up byteOrder char byteOrder = QgsApplication::endian(); switch ( GEOSGeomTypeId_r( geosinit.ctxt, mGeos ) ) { case GEOS_POINT: // a point { mGeometrySize = 1 + sizeof( int ) + 2 * sizeof( double ); mGeometry = new unsigned char[mGeometrySize]; const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, mGeos ); double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, cs, 0, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, cs, 0, &y ); QgsWkbPtr wkbPtr( mGeometry ); wkbPtr << byteOrder << QGis::WKBPoint << x << y; mDirtyWkb = false; return true; } // case GEOS_GEOM::GEOS_POINT case GEOS_LINESTRING: // a linestring { //QgsDebugMsg("Got a geos::GEOS_LINESTRING."); const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, mGeos ); unsigned int nPoints; GEOSCoordSeq_getSize_r( geosinit.ctxt, cs, &nPoints ); // allocate some space for the WKB mGeometrySize = 1 + // sizeof(byte) sizeof( int ) + sizeof( int ) + (( sizeof( double ) + sizeof( double ) ) * nPoints ); mGeometry = new unsigned char[mGeometrySize]; QgsWkbPtr wkbPtr( mGeometry ); wkbPtr << byteOrder << QGis::WKBLineString << nPoints; const GEOSCoordSequence *sequence = GEOSGeom_getCoordSeq_r( geosinit.ctxt, mGeos ); // assign points for ( unsigned int n = 0; n < nPoints; n++ ) { double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, sequence, n, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, sequence, n, &y ); wkbPtr << x << y; } mDirtyWkb = false; return true; // TODO: Deal with endian-ness } // case GEOS_GEOM::GEOS_LINESTRING case GEOS_LINEARRING: // a linear ring (linestring with 1st point == last point) { // TODO break; } // case GEOS_GEOM::GEOS_LINEARRING case GEOS_POLYGON: // a polygon { int nPointsInRing = 0; //first calculate the geometry size int geometrySize = 1 + 2 * sizeof( int ); //endian, type, number of rings const GEOSGeometry *theRing = GEOSGetExteriorRing_r( geosinit.ctxt, mGeos ); if ( theRing ) { geometrySize += sizeof( int ); geometrySize += getNumGeosPoints( theRing ) * 2 * sizeof( double ); } for ( int i = 0; i < GEOSGetNumInteriorRings_r( geosinit.ctxt, mGeos ); ++i ) { geometrySize += sizeof( int ); //number of points in ring theRing = GEOSGetInteriorRingN_r( geosinit.ctxt, mGeos, i ); if ( theRing ) { geometrySize += getNumGeosPoints( theRing ) * 2 * sizeof( double ); } } mGeometry = new unsigned char[geometrySize]; mGeometrySize = geometrySize; //then fill the geometry itself into the wkb QgsWkbPtr wkbPtr( mGeometry ); int nRings = GEOSGetNumInteriorRings_r( geosinit.ctxt, mGeos ) + 1; wkbPtr << byteOrder << QGis::WKBPolygon << nRings; //exterior ring first theRing = GEOSGetExteriorRing_r( geosinit.ctxt, mGeos ); if ( theRing ) { nPointsInRing = getNumGeosPoints( theRing ); wkbPtr << nPointsInRing; const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, theRing ); unsigned int n; GEOSCoordSeq_getSize_r( geosinit.ctxt, cs, &n ); for ( unsigned int j = 0; j < n; ++j ) { double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, cs, j, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, cs, j, &y ); wkbPtr << x << y; } } //interior rings after for ( int i = 0; i < GEOSGetNumInteriorRings_r( geosinit.ctxt, mGeos ); i++ ) { theRing = GEOSGetInteriorRingN_r( geosinit.ctxt, mGeos, i ); const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, theRing ); unsigned int nPointsInRing; GEOSCoordSeq_getSize_r( geosinit.ctxt, cs, &nPointsInRing ); wkbPtr << nPointsInRing; for ( unsigned int j = 0; j < nPointsInRing; j++ ) { double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, cs, j, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, cs, j, &y ); wkbPtr << x << y; } } mDirtyWkb = false; return true; } // case GEOS_GEOM::GEOS_POLYGON break; case GEOS_MULTIPOINT: // a collection of points { // determine size of geometry int geometrySize = 1 + 2 * sizeof( int ); for ( int i = 0; i < GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); i++ ) { geometrySize += 1 + sizeof( int ) + 2 * sizeof( double ); } mGeometry = new unsigned char[geometrySize]; mGeometrySize = geometrySize; QgsWkbPtr wkbPtr( mGeometry ); int numPoints = GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); wkbPtr << byteOrder << QGis::WKBMultiPoint << numPoints; for ( int i = 0; i < GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); i++ ) { //copy endian and point type wkbPtr << byteOrder << QGis::WKBPoint; const GEOSGeometry *currentPoint = GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ); const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, currentPoint ); double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, cs, 0, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, cs, 0, &y ); wkbPtr << x << y; } mDirtyWkb = false; return true; } // case GEOS_GEOM::GEOS_MULTIPOINT case GEOS_MULTILINESTRING: // a collection of linestrings { // determine size of geometry int geometrySize = 1 + 2 * sizeof( int ); for ( int i = 0; i < GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); i++ ) { geometrySize += 1 + 2 * sizeof( int ); geometrySize += getNumGeosPoints( GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ) ) * 2 * sizeof( double ); } mGeometry = new unsigned char[geometrySize]; mGeometrySize = geometrySize; QgsWkbPtr wkbPtr( mGeometry ); int numLines = GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); wkbPtr << byteOrder << QGis::WKBMultiLineString << numLines; //loop over lines for ( int i = 0; i < GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); i++ ) { //endian and type WKBLineString wkbPtr << byteOrder << QGis::WKBLineString; const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ) ); //line size unsigned int lineSize; GEOSCoordSeq_getSize_r( geosinit.ctxt, cs, &lineSize ); wkbPtr << lineSize; //vertex coordinates for ( unsigned int j = 0; j < lineSize; ++j ) { double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, cs, j, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, cs, j, &y ); wkbPtr << x << y; } } mDirtyWkb = false; return true; } // case GEOS_GEOM::GEOS_MULTILINESTRING case GEOS_MULTIPOLYGON: // a collection of polygons { //first determine size of geometry int geometrySize = 1 + 2 * sizeof( int ); //endian, type, number of polygons for ( int i = 0; i < GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); i++ ) { const GEOSGeometry *thePoly = GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ); geometrySize += 1 + 2 * sizeof( int ); //endian, type, number of rings //exterior ring geometrySize += sizeof( int ); //number of points in exterior ring const GEOSGeometry *exRing = GEOSGetExteriorRing_r( geosinit.ctxt, thePoly ); geometrySize += 2 * sizeof( double ) * getNumGeosPoints( exRing ); const GEOSGeometry *intRing = 0; for ( int j = 0; j < GEOSGetNumInteriorRings_r( geosinit.ctxt, thePoly ); j++ ) { geometrySize += sizeof( int ); //number of points in ring intRing = GEOSGetInteriorRingN_r( geosinit.ctxt, thePoly, j ); geometrySize += 2 * sizeof( double ) * getNumGeosPoints( intRing ); } } mGeometry = new unsigned char[geometrySize]; mGeometrySize = geometrySize; QgsWkbPtr wkbPtr( mGeometry ); int numPolygons = GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); wkbPtr << byteOrder << QGis::WKBMultiPolygon << numPolygons; //loop over polygons for ( int i = 0; i < GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); i++ ) { const GEOSGeometry *thePoly = GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ); int numRings = GEOSGetNumInteriorRings_r( geosinit.ctxt, thePoly ) + 1; //exterior ring const GEOSGeometry *theRing = GEOSGetExteriorRing_r( geosinit.ctxt, thePoly ); int nPointsInRing = getNumGeosPoints( theRing ); wkbPtr << byteOrder << QGis::WKBPolygon << numRings << nPointsInRing; const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, theRing ); for ( int k = 0; k < nPointsInRing; ++k ) { double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, cs, k, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, cs, k, &y ); wkbPtr << x << y; } //interior rings for ( int j = 0; j < GEOSGetNumInteriorRings_r( geosinit.ctxt, thePoly ); j++ ) { theRing = GEOSGetInteriorRingN_r( geosinit.ctxt, thePoly, j ); int nPointsInRing = getNumGeosPoints( theRing ); wkbPtr << nPointsInRing; const GEOSCoordSequence *cs = GEOSGeom_getCoordSeq_r( geosinit.ctxt, theRing ); for ( int k = 0; k < nPointsInRing; ++k ) { double x, y; GEOSCoordSeq_getX_r( geosinit.ctxt, cs, k, &x ); GEOSCoordSeq_getY_r( geosinit.ctxt, cs, k, &y ); wkbPtr << x << y; } } } mDirtyWkb = false; return true; } // case GEOS_GEOM::GEOS_MULTIPOLYGON case GEOS_GEOMETRYCOLLECTION: // a collection of heterogeneus geometries { // TODO QgsDebugMsg( "geometry collection - not supported" ); break; } // case GEOS_GEOM::GEOS_GEOMETRYCOLLECTION } // switch (mGeos->getGeometryTypeId()) return false; } QgsGeometry* QgsGeometry::convertToType( QGis::GeometryType destType, bool destMultipart ) const { switch ( destType ) { case QGis::Point: return convertToPoint( destMultipart ); case QGis::Line: return convertToLine( destMultipart ); case QGis::Polygon: return convertToPolygon( destMultipart ); default: return 0; } } bool QgsGeometry::convertToMultiType() { // TODO: implement with GEOS if ( mDirtyWkb ) { exportGeosToWkb(); } if ( !mGeometry ) { return false; } QGis::WkbType geomType = wkbType(); if ( geomType == QGis::WKBMultiPoint || geomType == QGis::WKBMultiPoint25D || geomType == QGis::WKBMultiLineString || geomType == QGis::WKBMultiLineString25D || geomType == QGis::WKBMultiPolygon || geomType == QGis::WKBMultiPolygon25D || geomType == QGis::WKBUnknown ) { return false; //no need to convert } size_t newGeomSize = mGeometrySize + 1 + 2 * sizeof( int ); //endian: 1, multitype: sizeof(int), number of geometries: sizeof(int) unsigned char* newGeometry = new unsigned char[newGeomSize]; //copy endian char byteOrder = QgsApplication::endian(); QgsWkbPtr wkbPtr( newGeometry ); wkbPtr << byteOrder; //copy wkbtype //todo QGis::WkbType newMultiType; switch ( geomType ) { case QGis::WKBPoint: newMultiType = QGis::WKBMultiPoint; break; case QGis::WKBPoint25D: newMultiType = QGis::WKBMultiPoint25D; break; case QGis::WKBLineString: newMultiType = QGis::WKBMultiLineString; break; case QGis::WKBLineString25D: newMultiType = QGis::WKBMultiLineString25D; break; case QGis::WKBPolygon: newMultiType = QGis::WKBMultiPolygon; break; case QGis::WKBPolygon25D: newMultiType = QGis::WKBMultiPolygon25D; break; default: delete [] newGeometry; return false; } wkbPtr << newMultiType << 1; //copy the existing single geometry memcpy( wkbPtr, mGeometry, mGeometrySize ); delete [] mGeometry; mGeometry = newGeometry; mGeometrySize = newGeomSize; mDirtyGeos = true; return true; } void QgsGeometry::transformVertex( QgsWkbPtr &wkbPtr, const QTransform& trans, bool hasZValue ) { qreal x, y, rotated_x, rotated_y; QgsWkbPtr tmp = wkbPtr; tmp >> x >> y; trans.map( x, y, &rotated_x, &rotated_y ); wkbPtr << rotated_x << rotated_y; if ( hasZValue ) wkbPtr += sizeof( double ); } void QgsGeometry::transformVertex( QgsWkbPtr &wkbPtr, const QgsCoordinateTransform& ct, bool hasZValue ) { double x, y, z = 0.0; QgsWkbPtr tmp = wkbPtr; tmp >> x >> y; ct.transformInPlace( x, y, z ); wkbPtr << x << y; if ( hasZValue ) wkbPtr += sizeof( double ); } GEOSGeometry* QgsGeometry::linePointDifference( GEOSGeometry* GEOSsplitPoint ) { int type = GEOSGeomTypeId_r( geosinit.ctxt, mGeos ); QgsMultiPolyline multiLine; if ( type == GEOS_MULTILINESTRING ) multiLine = asMultiPolyline(); else if ( type == GEOS_LINESTRING ) multiLine = QgsMultiPolyline() << asPolyline(); else return 0; // GEOSsplitPoint will be deleted in the caller, so make a clone QgsGeometry* geosPoint = fromGeosGeom( GEOSGeom_clone_r( geosinit.ctxt, GEOSsplitPoint ) ); QgsPoint splitPoint = geosPoint->asPoint(); delete geosPoint; QgsMultiPolyline lines; QgsPolyline line; QgsPolyline newline; //For each part for ( int i = 0; i < multiLine.size() ; ++i ) { line = multiLine[i]; newline = QgsPolyline(); newline.append( line[0] ); //For each segment for ( int j = 1; j < line.size() - 1 ; ++j ) { newline.append( line[j] ); if ( line[j] == splitPoint ) { lines.append( newline ); newline = QgsPolyline(); newline.append( line[j] ); } } newline.append( line.last() ); lines.append( newline ); } QgsGeometry* splitLines = fromMultiPolyline( lines ); GEOSGeometry* splitGeom = GEOSGeom_clone_r( geosinit.ctxt, splitLines->asGeos() ); delete splitLines; return splitGeom; } int QgsGeometry::splitLinearGeometry( GEOSGeometry *splitLine, QList<QgsGeometry*>& newGeometries ) { if ( !splitLine ) return 2; if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 5; //first test if linestring intersects geometry. If not, return straight away if ( !GEOSIntersects_r( geosinit.ctxt, splitLine, mGeos ) ) return 1; //check that split line has no linear intersection int linearIntersect = GEOSRelatePattern_r( geosinit.ctxt, mGeos, splitLine, "1********" ); if ( linearIntersect > 0 ) return 3; int splitGeomType = GEOSGeomTypeId_r( geosinit.ctxt, splitLine ); GEOSGeometry* splitGeom; if ( splitGeomType == GEOS_POINT ) { splitGeom = linePointDifference( splitLine ); } else { splitGeom = GEOSDifference_r( geosinit.ctxt, mGeos, splitLine ); } QVector<GEOSGeometry*> lineGeoms; int splitType = GEOSGeomTypeId_r( geosinit.ctxt, splitGeom ); if ( splitType == GEOS_MULTILINESTRING ) { int nGeoms = GEOSGetNumGeometries_r( geosinit.ctxt, splitGeom ); for ( int i = 0; i < nGeoms; ++i ) lineGeoms << GEOSGeom_clone_r( geosinit.ctxt, GEOSGetGeometryN_r( geosinit.ctxt, splitGeom, i ) ); } else { lineGeoms << GEOSGeom_clone_r( geosinit.ctxt, splitGeom ); } mergeGeometriesMultiTypeSplit( lineGeoms ); if ( lineGeoms.size() > 0 ) { fromGeos( lineGeoms[0] ); } for ( int i = 1; i < lineGeoms.size(); ++i ) { newGeometries << fromGeosGeom( lineGeoms[i] ); } GEOSGeom_destroy_r( geosinit.ctxt, splitGeom ); return 0; } int QgsGeometry::splitPolygonGeometry( GEOSGeometry* splitLine, QList<QgsGeometry*>& newGeometries ) { if ( !splitLine ) return 2; if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 5; //first test if linestring intersects geometry. If not, return straight away if ( !GEOSIntersects_r( geosinit.ctxt, splitLine, mGeos ) ) return 1; //first union all the polygon rings together (to get them noded, see JTS developer guide) GEOSGeometry *nodedGeometry = nodeGeometries( splitLine, mGeos ); if ( !nodedGeometry ) return 2; //an error occured during noding GEOSGeometry *polygons = GEOSPolygonize_r( geosinit.ctxt, &nodedGeometry, 1 ); if ( !polygons || numberOfGeometries( polygons ) == 0 ) { if ( polygons ) GEOSGeom_destroy_r( geosinit.ctxt, polygons ); GEOSGeom_destroy_r( geosinit.ctxt, nodedGeometry ); return 4; } GEOSGeom_destroy_r( geosinit.ctxt, nodedGeometry ); //test every polygon if contained in original geometry //include in result if yes QVector<GEOSGeometry*> testedGeometries; GEOSGeometry *intersectGeometry = 0; //ratio intersect geometry / geometry. This should be close to 1 //if the polygon belongs to the input geometry for ( int i = 0; i < numberOfGeometries( polygons ); i++ ) { const GEOSGeometry *polygon = GEOSGetGeometryN_r( geosinit.ctxt, polygons, i ); intersectGeometry = GEOSIntersection_r( geosinit.ctxt, mGeos, polygon ); if ( !intersectGeometry ) { QgsDebugMsg( "intersectGeometry is NULL" ); continue; } double intersectionArea; GEOSArea_r( geosinit.ctxt, intersectGeometry, &intersectionArea ); double polygonArea; GEOSArea_r( geosinit.ctxt, polygon, &polygonArea ); const double areaRatio = intersectionArea / polygonArea; if ( areaRatio > 0.99 && areaRatio < 1.01 ) testedGeometries << GEOSGeom_clone_r( geosinit.ctxt, polygon ); GEOSGeom_destroy_r( geosinit.ctxt, intersectGeometry ); } bool splitDone = true; int nGeometriesThis = numberOfGeometries( mGeos ); //original number of geometries if ( testedGeometries.size() == nGeometriesThis ) { splitDone = false; } mergeGeometriesMultiTypeSplit( testedGeometries ); //no split done, preserve original geometry if ( !splitDone ) { for ( int i = 0; i < testedGeometries.size(); ++i ) { GEOSGeom_destroy_r( geosinit.ctxt, testedGeometries[i] ); } return 1; } else if ( testedGeometries.size() > 0 ) //split successfull { GEOSGeom_destroy_r( geosinit.ctxt, mGeos ); mGeos = testedGeometries[0]; mDirtyWkb = true; } int i; for ( i = 1; i < testedGeometries.size() && GEOSisValid_r( geosinit.ctxt, testedGeometries[i] ); ++i ) ; if ( i < testedGeometries.size() ) { for ( i = 0; i < testedGeometries.size(); ++i ) GEOSGeom_destroy_r( geosinit.ctxt, testedGeometries[i] ); return 3; } for ( i = 1; i < testedGeometries.size(); ++i ) newGeometries << fromGeosGeom( testedGeometries[i] ); GEOSGeom_destroy_r( geosinit.ctxt, polygons ); return 0; } GEOSGeometry* QgsGeometry::reshapePolygon( const GEOSGeometry* polygon, const GEOSGeometry* reshapeLineGeos ) { //go through outer shell and all inner rings and check if there is exactly one intersection of a ring and the reshape line int nIntersections = 0; int lastIntersectingRing = -2; const GEOSGeometry* lastIntersectingGeom = 0; int nRings = GEOSGetNumInteriorRings_r( geosinit.ctxt, polygon ); if ( nRings < 0 ) return 0; //does outer ring intersect? const GEOSGeometry* outerRing = GEOSGetExteriorRing_r( geosinit.ctxt, polygon ); if ( GEOSIntersects_r( geosinit.ctxt, outerRing, reshapeLineGeos ) == 1 ) { ++nIntersections; lastIntersectingRing = -1; lastIntersectingGeom = outerRing; } //do inner rings intersect? const GEOSGeometry **innerRings = new const GEOSGeometry*[nRings]; try { for ( int i = 0; i < nRings; ++i ) { innerRings[i] = GEOSGetInteriorRingN_r( geosinit.ctxt, polygon, i ); if ( GEOSIntersects_r( geosinit.ctxt, innerRings[i], reshapeLineGeos ) == 1 ) { ++nIntersections; lastIntersectingRing = i; lastIntersectingGeom = innerRings[i]; } } } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); nIntersections = 0; } if ( nIntersections != 1 ) //reshape line is only allowed to intersect one ring { delete [] innerRings; return 0; } //we have one intersecting ring, let's try to reshape it GEOSGeometry* reshapeResult = reshapeLine( lastIntersectingGeom, reshapeLineGeos ); if ( !reshapeResult ) { delete [] innerRings; return 0; } //if reshaping took place, we need to reassemble the polygon and its rings GEOSGeometry* newRing = 0; const GEOSCoordSequence* reshapeSequence = GEOSGeom_getCoordSeq_r( geosinit.ctxt, reshapeResult ); GEOSCoordSequence* newCoordSequence = GEOSCoordSeq_clone_r( geosinit.ctxt, reshapeSequence ); GEOSGeom_destroy_r( geosinit.ctxt, reshapeResult ); try { newRing = GEOSGeom_createLinearRing_r( geosinit.ctxt, newCoordSequence ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); } if ( !newRing ) { delete [] innerRings; return 0; } GEOSGeometry* newOuterRing = 0; if ( lastIntersectingRing == -1 ) newOuterRing = newRing; else newOuterRing = GEOSGeom_clone_r( geosinit.ctxt, outerRing ); //check if all the rings are still inside the outer boundary QList<GEOSGeometry*> ringList; if ( nRings > 0 ) { GEOSGeometry* outerRingPoly = GEOSGeom_createPolygon_r( geosinit.ctxt, GEOSGeom_clone_r( geosinit.ctxt, newOuterRing ), 0, 0 ); if ( outerRingPoly ) { GEOSGeometry* currentRing = 0; for ( int i = 0; i < nRings; ++i ) { if ( lastIntersectingRing == i ) currentRing = newRing; else currentRing = GEOSGeom_clone_r( geosinit.ctxt, innerRings[i] ); //possibly a ring is no longer contained in the result polygon after reshape if ( GEOSContains_r( geosinit.ctxt, outerRingPoly, currentRing ) == 1 ) ringList.push_back( currentRing ); else GEOSGeom_destroy_r( geosinit.ctxt, currentRing ); } } GEOSGeom_destroy_r( geosinit.ctxt, outerRingPoly ); } GEOSGeometry** newInnerRings = new GEOSGeometry*[ringList.size()]; for ( int i = 0; i < ringList.size(); ++i ) newInnerRings[i] = ringList.at( i ); delete [] innerRings; GEOSGeometry* reshapedPolygon = GEOSGeom_createPolygon_r( geosinit.ctxt, newOuterRing, newInnerRings, ringList.size() ); delete[] newInnerRings; return reshapedPolygon; } GEOSGeometry* QgsGeometry::reshapeLine( const GEOSGeometry* line, const GEOSGeometry* reshapeLineGeos ) { if ( !line || !reshapeLineGeos ) return 0; bool atLeastTwoIntersections = false; try { //make sure there are at least two intersection between line and reshape geometry GEOSGeometry* intersectGeom = GEOSIntersection_r( geosinit.ctxt, line, reshapeLineGeos ); if ( intersectGeom ) { atLeastTwoIntersections = ( GEOSGeomTypeId_r( geosinit.ctxt, intersectGeom ) == GEOS_MULTIPOINT && GEOSGetNumGeometries_r( geosinit.ctxt, intersectGeom ) > 1 ); GEOSGeom_destroy_r( geosinit.ctxt, intersectGeom ); } } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); atLeastTwoIntersections = false; } if ( !atLeastTwoIntersections ) return 0; //begin and end point of original line const GEOSCoordSequence* lineCoordSeq = GEOSGeom_getCoordSeq_r( geosinit.ctxt, line ); if ( !lineCoordSeq ) return 0; unsigned int lineCoordSeqSize; if ( GEOSCoordSeq_getSize_r( geosinit.ctxt, lineCoordSeq, &lineCoordSeqSize ) == 0 ) return 0; if ( lineCoordSeqSize < 2 ) return 0; //first and last vertex of line double x1, y1, x2, y2; GEOSCoordSeq_getX_r( geosinit.ctxt, lineCoordSeq, 0, &x1 ); GEOSCoordSeq_getY_r( geosinit.ctxt, lineCoordSeq, 0, &y1 ); GEOSCoordSeq_getX_r( geosinit.ctxt, lineCoordSeq, lineCoordSeqSize - 1, &x2 ); GEOSCoordSeq_getY_r( geosinit.ctxt, lineCoordSeq, lineCoordSeqSize - 1, &y2 ); GEOSGeometry* beginLineVertex = createGeosPoint( QgsPoint( x1, y1 ) ); GEOSGeometry* endLineVertex = createGeosPoint( QgsPoint( x2, y2 ) ); bool isRing = false; if ( GEOSGeomTypeId_r( geosinit.ctxt, line ) == GEOS_LINEARRING || GEOSEquals_r( geosinit.ctxt, beginLineVertex, endLineVertex ) == 1 ) isRing = true; //node line and reshape line GEOSGeometry* nodedGeometry = nodeGeometries( reshapeLineGeos, line ); if ( !nodedGeometry ) { GEOSGeom_destroy_r( geosinit.ctxt, beginLineVertex ); GEOSGeom_destroy_r( geosinit.ctxt, endLineVertex ); return 0; } //and merge them together GEOSGeometry *mergedLines = GEOSLineMerge_r( geosinit.ctxt, nodedGeometry ); GEOSGeom_destroy_r( geosinit.ctxt, nodedGeometry ); if ( !mergedLines ) { GEOSGeom_destroy_r( geosinit.ctxt, beginLineVertex ); GEOSGeom_destroy_r( geosinit.ctxt, endLineVertex ); return 0; } int numMergedLines = GEOSGetNumGeometries_r( geosinit.ctxt, mergedLines ); if ( numMergedLines < 2 ) //some special cases. Normally it is >2 { GEOSGeom_destroy_r( geosinit.ctxt, beginLineVertex ); GEOSGeom_destroy_r( geosinit.ctxt, endLineVertex ); if ( numMergedLines == 1 ) //reshape line is from begin to endpoint. So we keep the reshapeline return GEOSGeom_clone_r( geosinit.ctxt, reshapeLineGeos ); else return 0; } QList<GEOSGeometry*> resultLineParts; //collection with the line segments that will be contained in result QList<GEOSGeometry*> probableParts; //parts where we can decide on inclusion only after going through all the candidates for ( int i = 0; i < numMergedLines; ++i ) { const GEOSGeometry* currentGeom; currentGeom = GEOSGetGeometryN_r( geosinit.ctxt, mergedLines, i ); const GEOSCoordSequence* currentCoordSeq = GEOSGeom_getCoordSeq_r( geosinit.ctxt, currentGeom ); unsigned int currentCoordSeqSize; GEOSCoordSeq_getSize_r( geosinit.ctxt, currentCoordSeq, &currentCoordSeqSize ); if ( currentCoordSeqSize < 2 ) continue; //get the two endpoints of the current line merge result double xBegin, xEnd, yBegin, yEnd; GEOSCoordSeq_getX_r( geosinit.ctxt, currentCoordSeq, 0, &xBegin ); GEOSCoordSeq_getY_r( geosinit.ctxt, currentCoordSeq, 0, &yBegin ); GEOSCoordSeq_getX_r( geosinit.ctxt, currentCoordSeq, currentCoordSeqSize - 1, &xEnd ); GEOSCoordSeq_getY_r( geosinit.ctxt, currentCoordSeq, currentCoordSeqSize - 1, &yEnd ); GEOSGeometry* beginCurrentGeomVertex = createGeosPoint( QgsPoint( xBegin, yBegin ) ); GEOSGeometry* endCurrentGeomVertex = createGeosPoint( QgsPoint( xEnd, yEnd ) ); //check how many endpoints of the line merge result are on the (original) line int nEndpointsOnOriginalLine = 0; if ( pointContainedInLine( beginCurrentGeomVertex, line ) == 1 ) nEndpointsOnOriginalLine += 1; if ( pointContainedInLine( endCurrentGeomVertex, line ) == 1 ) nEndpointsOnOriginalLine += 1; //check how many endpoints equal the endpoints of the original line int nEndpointsSameAsOriginalLine = 0; if ( GEOSEquals_r( geosinit.ctxt, beginCurrentGeomVertex, beginLineVertex ) == 1 || GEOSEquals_r( geosinit.ctxt, beginCurrentGeomVertex, endLineVertex ) == 1 ) nEndpointsSameAsOriginalLine += 1; if ( GEOSEquals_r( geosinit.ctxt, endCurrentGeomVertex, beginLineVertex ) == 1 || GEOSEquals_r( geosinit.ctxt, endCurrentGeomVertex, endLineVertex ) == 1 ) nEndpointsSameAsOriginalLine += 1; //check if the current geometry overlaps the original geometry (GEOSOverlap does not seem to work with linestrings) bool currentGeomOverlapsOriginalGeom = false; bool currentGeomOverlapsReshapeLine = false; if ( QgsGeometry::lineContainedInLine( currentGeom, line ) == 1 ) currentGeomOverlapsOriginalGeom = true; if ( QgsGeometry::lineContainedInLine( currentGeom, reshapeLineGeos ) == 1 ) currentGeomOverlapsReshapeLine = true; //logic to decide if this part belongs to the result if ( nEndpointsSameAsOriginalLine == 1 && nEndpointsOnOriginalLine == 2 && currentGeomOverlapsOriginalGeom ) { resultLineParts.push_back( GEOSGeom_clone_r( geosinit.ctxt, currentGeom ) ); } //for closed rings, we take one segment from the candidate list else if ( isRing && nEndpointsOnOriginalLine == 2 && currentGeomOverlapsOriginalGeom ) { probableParts.push_back( GEOSGeom_clone_r( geosinit.ctxt, currentGeom ) ); } else if ( nEndpointsOnOriginalLine == 2 && !currentGeomOverlapsOriginalGeom ) { resultLineParts.push_back( GEOSGeom_clone_r( geosinit.ctxt, currentGeom ) ); } else if ( nEndpointsSameAsOriginalLine == 2 && !currentGeomOverlapsOriginalGeom ) { resultLineParts.push_back( GEOSGeom_clone_r( geosinit.ctxt, currentGeom ) ); } else if ( currentGeomOverlapsOriginalGeom && currentGeomOverlapsReshapeLine ) { resultLineParts.push_back( GEOSGeom_clone_r( geosinit.ctxt, currentGeom ) ); } GEOSGeom_destroy_r( geosinit.ctxt, beginCurrentGeomVertex ); GEOSGeom_destroy_r( geosinit.ctxt, endCurrentGeomVertex ); } //add the longest segment from the probable list for rings (only used for polygon rings) if ( isRing && probableParts.size() > 0 ) { GEOSGeometry* maxGeom = 0; //the longest geometry in the probabla list GEOSGeometry* currentGeom = 0; double maxLength = -DBL_MAX; double currentLength = 0; for ( int i = 0; i < probableParts.size(); ++i ) { currentGeom = probableParts.at( i ); GEOSLength_r( geosinit.ctxt, currentGeom, &currentLength ); if ( currentLength > maxLength ) { maxLength = currentLength; GEOSGeom_destroy_r( geosinit.ctxt, maxGeom ); maxGeom = currentGeom; } else { GEOSGeom_destroy_r( geosinit.ctxt, currentGeom ); } } resultLineParts.push_back( maxGeom ); } GEOSGeom_destroy_r( geosinit.ctxt, beginLineVertex ); GEOSGeom_destroy_r( geosinit.ctxt, endLineVertex ); GEOSGeom_destroy_r( geosinit.ctxt, mergedLines ); GEOSGeometry* result = 0; if ( resultLineParts.size() < 1 ) return 0; if ( resultLineParts.size() == 1 ) //the whole result was reshaped { result = resultLineParts[0]; } else //>1 { GEOSGeometry **lineArray = new GEOSGeometry*[resultLineParts.size()]; for ( int i = 0; i < resultLineParts.size(); ++i ) { lineArray[i] = resultLineParts[i]; } //create multiline from resultLineParts GEOSGeometry* multiLineGeom = GEOSGeom_createCollection_r( geosinit.ctxt, GEOS_MULTILINESTRING, lineArray, resultLineParts.size() ); delete [] lineArray; //then do a linemerge with the newly combined partstrings result = GEOSLineMerge_r( geosinit.ctxt, multiLineGeom ); GEOSGeom_destroy_r( geosinit.ctxt, multiLineGeom ); } //now test if the result is a linestring. Otherwise something went wrong if ( GEOSGeomTypeId_r( geosinit.ctxt, result ) != GEOS_LINESTRING ) { GEOSGeom_destroy_r( geosinit.ctxt, result ); return 0; } return result; } int QgsGeometry::topologicalTestPointsSplit( const GEOSGeometry* splitLine, QList<QgsPoint>& testPoints ) const { //Find out the intersection points between splitLineGeos and this geometry. //These points need to be tested for topological correctness by the calling function //if topological editing is enabled testPoints.clear(); GEOSGeometry* intersectionGeom = GEOSIntersection_r( geosinit.ctxt, mGeos, splitLine ); if ( !intersectionGeom ) return 1; bool simple = false; int nIntersectGeoms = 1; if ( GEOSGeomTypeId_r( geosinit.ctxt, intersectionGeom ) == GEOS_LINESTRING || GEOSGeomTypeId_r( geosinit.ctxt, intersectionGeom ) == GEOS_POINT ) simple = true; if ( !simple ) nIntersectGeoms = GEOSGetNumGeometries_r( geosinit.ctxt, intersectionGeom ); for ( int i = 0; i < nIntersectGeoms; ++i ) { const GEOSGeometry* currentIntersectGeom; if ( simple ) currentIntersectGeom = intersectionGeom; else currentIntersectGeom = GEOSGetGeometryN_r( geosinit.ctxt, intersectionGeom, i ); const GEOSCoordSequence* lineSequence = GEOSGeom_getCoordSeq_r( geosinit.ctxt, currentIntersectGeom ); unsigned int sequenceSize = 0; double x, y; if ( GEOSCoordSeq_getSize_r( geosinit.ctxt, lineSequence, &sequenceSize ) != 0 ) { for ( unsigned int i = 0; i < sequenceSize; ++i ) { if ( GEOSCoordSeq_getX_r( geosinit.ctxt, lineSequence, i, &x ) != 0 ) { if ( GEOSCoordSeq_getY_r( geosinit.ctxt, lineSequence, i, &y ) != 0 ) { testPoints.push_back( QgsPoint( x, y ) ); } } } } } GEOSGeom_destroy_r( geosinit.ctxt, intersectionGeom ); return 0; } GEOSGeometry *QgsGeometry::nodeGeometries( const GEOSGeometry *splitLine, const GEOSGeometry *geom ) { if ( !splitLine || !geom ) return 0; if ( GEOSGeomTypeId_r( geosinit.ctxt, geom ) == GEOS_POLYGON || GEOSGeomTypeId_r( geosinit.ctxt, geom ) == GEOS_MULTIPOLYGON ) { GEOSGeometry *geometryBoundary = GEOSBoundary_r( geosinit.ctxt, geom ); GEOSGeometry *unionGeometry = GEOSUnion_r( geosinit.ctxt, splitLine, geometryBoundary ); GEOSGeom_destroy_r( geosinit.ctxt, geometryBoundary ); return unionGeometry; } else { return GEOSUnion_r( geosinit.ctxt, splitLine, geom ); } } int QgsGeometry::lineContainedInLine( const GEOSGeometry* line1, const GEOSGeometry* line2 ) { if ( !line1 || !line2 ) { return -1; } double bufferDistance = pow( 10.0L, geomDigits( line2 ) - 11 ); GEOSGeometry* bufferGeom = GEOSBuffer_r( geosinit.ctxt, line2, bufferDistance, DEFAULT_QUADRANT_SEGMENTS ); if ( !bufferGeom ) return -2; GEOSGeometry* intersectionGeom = GEOSIntersection_r( geosinit.ctxt, bufferGeom, line1 ); //compare ratio between line1Length and intersectGeomLength (usually close to 1 if line1 is contained in line2) double intersectGeomLength; double line1Length; GEOSLength_r( geosinit.ctxt, intersectionGeom, &intersectGeomLength ); GEOSLength_r( geosinit.ctxt, line1, &line1Length ); GEOSGeom_destroy_r( geosinit.ctxt, bufferGeom ); GEOSGeom_destroy_r( geosinit.ctxt, intersectionGeom ); double intersectRatio = line1Length / intersectGeomLength; if ( intersectRatio > 0.9 && intersectRatio < 1.1 ) return 1; return 0; } int QgsGeometry::pointContainedInLine( const GEOSGeometry* point, const GEOSGeometry* line ) { if ( !point || !line ) return -1; double bufferDistance = pow( 10.0L, geomDigits( line ) - 11 ); GEOSGeometry* lineBuffer = GEOSBuffer_r( geosinit.ctxt, line, bufferDistance, 8 ); if ( !lineBuffer ) return -2; bool contained = false; if ( GEOSContains_r( geosinit.ctxt, lineBuffer, point ) == 1 ) contained = true; GEOSGeom_destroy_r( geosinit.ctxt, lineBuffer ); return contained; } int QgsGeometry::geomDigits( const GEOSGeometry* geom ) { GEOSGeometry* bbox = GEOSEnvelope_r( geosinit.ctxt, geom ); if ( !bbox ) return -1; const GEOSGeometry* bBoxRing = GEOSGetExteriorRing_r( geosinit.ctxt, bbox ); if ( !bBoxRing ) return -1; const GEOSCoordSequence* bBoxCoordSeq = GEOSGeom_getCoordSeq_r( geosinit.ctxt, bBoxRing ); if ( !bBoxCoordSeq ) return -1; unsigned int nCoords = 0; if ( !GEOSCoordSeq_getSize_r( geosinit.ctxt, bBoxCoordSeq, &nCoords ) ) return -1; int maxDigits = -1; for ( unsigned int i = 0; i < nCoords - 1; ++i ) { double t; GEOSCoordSeq_getX_r( geosinit.ctxt, bBoxCoordSeq, i, &t ); int digits; digits = ceil( log10( fabs( t ) ) ); if ( digits > maxDigits ) maxDigits = digits; GEOSCoordSeq_getY_r( geosinit.ctxt, bBoxCoordSeq, i, &t ); digits = ceil( log10( fabs( t ) ) ); if ( digits > maxDigits ) maxDigits = digits; } return maxDigits; } int QgsGeometry::numberOfGeometries( GEOSGeometry* g ) const { if ( !g ) return 0; int geometryType = GEOSGeomTypeId_r( geosinit.ctxt, g ); if ( geometryType == GEOS_POINT || geometryType == GEOS_LINESTRING || geometryType == GEOS_LINEARRING || geometryType == GEOS_POLYGON ) return 1; //calling GEOSGetNumGeometries is save for multi types and collections also in geos2 return GEOSGetNumGeometries_r( geosinit.ctxt, g ); } int QgsGeometry::mergeGeometriesMultiTypeSplit( QVector<GEOSGeometry*>& splitResult ) { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 1; //convert mGeos to geometry collection int type = GEOSGeomTypeId_r( geosinit.ctxt, mGeos ); if ( type != GEOS_GEOMETRYCOLLECTION && type != GEOS_MULTILINESTRING && type != GEOS_MULTIPOLYGON && type != GEOS_MULTIPOINT ) return 0; QVector<GEOSGeometry*> copyList = splitResult; splitResult.clear(); //collect all the geometries that belong to the initial multifeature QVector<GEOSGeometry*> unionGeom; for ( int i = 0; i < copyList.size(); ++i ) { //is this geometry a part of the original multitype? bool isPart = false; for ( int j = 0; j < GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); j++ ) { if ( GEOSEquals_r( geosinit.ctxt, copyList[i], GEOSGetGeometryN_r( geosinit.ctxt, mGeos, j ) ) ) { isPart = true; break; } } if ( isPart ) { unionGeom << copyList[i]; } else { QVector<GEOSGeometry*> geomVector; geomVector << copyList[i]; if ( type == GEOS_MULTILINESTRING ) splitResult << createGeosCollection( GEOS_MULTILINESTRING, geomVector ); else if ( type == GEOS_MULTIPOLYGON ) splitResult << createGeosCollection( GEOS_MULTIPOLYGON, geomVector ); else GEOSGeom_destroy_r( geosinit.ctxt, copyList[i] ); } } //make multifeature out of unionGeom if ( unionGeom.size() > 0 ) { if ( type == GEOS_MULTILINESTRING ) splitResult << createGeosCollection( GEOS_MULTILINESTRING, unionGeom ); else if ( type == GEOS_MULTIPOLYGON ) splitResult << createGeosCollection( GEOS_MULTIPOLYGON, unionGeom ); } else { unionGeom.clear(); } return 0; } QgsPoint QgsGeometry::asPoint( QgsConstWkbPtr &wkbPtr, bool hasZValue ) const { wkbPtr += 1 + sizeof( int ); double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); return QgsPoint( x, y ); } QgsPolyline QgsGeometry::asPolyline( QgsConstWkbPtr &wkbPtr, bool hasZValue ) const { wkbPtr += 1 + sizeof( int ); unsigned int nPoints; wkbPtr >> nPoints; QgsPolyline line( nPoints ); // Extract the points from the WKB format into the x and y vectors. for ( uint i = 0; i < nPoints; ++i ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); line[i] = QgsPoint( x, y ); } return line; } QgsPolygon QgsGeometry::asPolygon( QgsConstWkbPtr &wkbPtr, bool hasZValue ) const { wkbPtr += 1 + sizeof( int ); // get number of rings in the polygon unsigned int numRings; wkbPtr >> numRings; if ( numRings == 0 ) // sanity check for zero rings in polygon return QgsPolygon(); QgsPolygon rings( numRings ); for ( uint idx = 0; idx < numRings; idx++ ) { int nPoints; wkbPtr >> nPoints; QgsPolyline ring( nPoints ); for ( int jdx = 0; jdx < nPoints; jdx++ ) { double x, y; wkbPtr >> x >> y; if ( hasZValue ) wkbPtr += sizeof( double ); ring[jdx] = QgsPoint( x, y ); } rings[idx] = ring; } return rings; } QgsPoint QgsGeometry::asPoint() const { QGis::WkbType type = wkbType(); if ( type != QGis::WKBPoint && type != QGis::WKBPoint25D ) return QgsPoint( 0, 0 ); QgsConstWkbPtr wkbPtr( mGeometry ); return asPoint( wkbPtr, type == QGis::WKBPoint25D ); } QgsPolyline QgsGeometry::asPolyline() const { QGis::WkbType type = wkbType(); if ( type != QGis::WKBLineString && type != QGis::WKBLineString25D ) return QgsPolyline(); QgsConstWkbPtr wkbPtr( mGeometry ); return asPolyline( wkbPtr, type == QGis::WKBLineString25D ); } QgsPolygon QgsGeometry::asPolygon() const { QGis::WkbType type = wkbType(); if ( type != QGis::WKBPolygon && type != QGis::WKBPolygon25D ) return QgsPolygon(); QgsConstWkbPtr wkbPtr( mGeometry ); return asPolygon( wkbPtr, type == QGis::WKBPolygon25D ); } QgsMultiPoint QgsGeometry::asMultiPoint() const { QGis::WkbType type = wkbType(); if ( type != QGis::WKBMultiPoint && type != QGis::WKBMultiPoint25D ) return QgsMultiPoint(); bool hasZValue = ( type == QGis::WKBMultiPoint25D ); QgsConstWkbPtr wkbPtr( mGeometry + 1 + sizeof( int ) ); int nPoints; wkbPtr >> nPoints; QgsMultiPoint points( nPoints ); for ( int i = 0; i < nPoints; i++ ) { points[i] = asPoint( wkbPtr, hasZValue ); } return points; } QgsMultiPolyline QgsGeometry::asMultiPolyline() const { QGis::WkbType type = wkbType(); if ( type != QGis::WKBMultiLineString && type != QGis::WKBMultiLineString25D ) return QgsMultiPolyline(); bool hasZValue = ( type == QGis::WKBMultiLineString25D ); QgsConstWkbPtr wkbPtr( mGeometry + 1 + sizeof( int ) ); int numLineStrings; wkbPtr >> numLineStrings; QgsMultiPolyline lines( numLineStrings ); for ( int i = 0; i < numLineStrings; i++ ) lines[i] = asPolyline( wkbPtr, hasZValue ); return lines; } QgsMultiPolygon QgsGeometry::asMultiPolygon() const { QGis::WkbType type = wkbType(); if ( type != QGis::WKBMultiPolygon && type != QGis::WKBMultiPolygon25D ) return QgsMultiPolygon(); bool hasZValue = ( type == QGis::WKBMultiPolygon25D ); QgsConstWkbPtr wkbPtr( mGeometry + 1 + sizeof( int ) ); int numPolygons; wkbPtr >> numPolygons; QgsMultiPolygon polygons( numPolygons ); for ( int i = 0; i < numPolygons; i++ ) polygons[i] = asPolygon( wkbPtr, hasZValue ); return polygons; } double QgsGeometry::area() const { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return -1.0; double area; try { if ( GEOSArea_r( geosinit.ctxt, mGeos, &area ) == 0 ) return -1.0; } CATCH_GEOS( -1.0 ) return area; } double QgsGeometry::length() const { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return -1.0; double length; try { if ( GEOSLength_r( geosinit.ctxt, mGeos, &length ) == 0 ) return -1.0; } CATCH_GEOS( -1.0 ) return length; } double QgsGeometry::distance( const QgsGeometry& geom ) const { if ( mDirtyGeos ) exportWkbToGeos(); if ( geom.mDirtyGeos ) geom.exportWkbToGeos(); if ( !mGeos || !geom.mGeos ) return -1.0; double dist = -1.0; try { GEOSDistance_r( geosinit.ctxt, mGeos, geom.mGeos, &dist ); } CATCH_GEOS( -1.0 ) return dist; } QgsGeometry* QgsGeometry::buffer( double distance, int segments ) const { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 0; try { return fromGeosGeom( GEOSBuffer_r( geosinit.ctxt, mGeos, distance, segments ) ); } CATCH_GEOS( 0 ) } QgsGeometry*QgsGeometry::buffer( double distance, int segments, int endCapStyle, int joinStyle, double mitreLimit ) const { #if defined(GEOS_VERSION_MAJOR) && defined(GEOS_VERSION_MINOR) && \ ((GEOS_VERSION_MAJOR>3) || ((GEOS_VERSION_MAJOR==3) && (GEOS_VERSION_MINOR>=3))) if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 0; try { return fromGeosGeom( GEOSBufferWithStyle_r( geosinit.ctxt, mGeos, distance, segments, endCapStyle, joinStyle, mitreLimit ) ); } CATCH_GEOS( 0 ) #else return 0; #endif } QgsGeometry* QgsGeometry::offsetCurve( double distance, int segments, int joinStyle, double mitreLimit ) const { #if defined(GEOS_VERSION_MAJOR) && defined(GEOS_VERSION_MINOR) && \ ((GEOS_VERSION_MAJOR>3) || ((GEOS_VERSION_MAJOR==3) && (GEOS_VERSION_MINOR>=3))) if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos || this->type() != QGis::Line ) return 0; try { return fromGeosGeom( GEOSOffsetCurve_r( geosinit.ctxt, mGeos, distance, segments, joinStyle, mitreLimit ) ); } CATCH_GEOS( 0 ) #else return 0; #endif } QgsGeometry* QgsGeometry::simplify( double tolerance ) const { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 0; try { return fromGeosGeom( GEOSTopologyPreserveSimplify_r( geosinit.ctxt, mGeos, tolerance ) ); } CATCH_GEOS( 0 ) } QgsGeometry* QgsGeometry::smooth( const unsigned int iterations, const double offset ) const { switch ( wkbType() ) { case QGis::WKBPoint: case QGis::WKBPoint25D: case QGis::WKBMultiPoint: case QGis::WKBMultiPoint25D: //can't smooth a point based geometry return new QgsGeometry( *this ); case QGis::WKBLineString: case QGis::WKBLineString25D: { QgsPolyline line = asPolyline(); return QgsGeometry::fromPolyline( smoothLine( line, iterations, offset ) ); } case QGis::WKBMultiLineString: case QGis::WKBMultiLineString25D: { QgsMultiPolyline multiline = asMultiPolyline(); QgsMultiPolyline resultMultiline; QgsMultiPolyline::const_iterator lineIt = multiline.constBegin(); for ( ; lineIt != multiline.constEnd(); ++lineIt ) { resultMultiline << smoothLine( *lineIt, iterations, offset ); } return QgsGeometry::fromMultiPolyline( resultMultiline ); } case QGis::WKBPolygon: case QGis::WKBPolygon25D: { QgsPolygon poly = asPolygon(); return QgsGeometry::fromPolygon( smoothPolygon( poly, iterations, offset ) ); } case QGis::WKBMultiPolygon: case QGis::WKBMultiPolygon25D: { QgsMultiPolygon multipoly = asMultiPolygon(); QgsMultiPolygon resultMultipoly; QgsMultiPolygon::const_iterator polyIt = multipoly.constBegin(); for ( ; polyIt != multipoly.constEnd(); ++polyIt ) { resultMultipoly << smoothPolygon( *polyIt, iterations, offset ); } return QgsGeometry::fromMultiPolygon( resultMultipoly ); } break; case QGis::WKBUnknown: default: return new QgsGeometry( *this ); } } inline QgsPoint interpolatePointOnLine( const QgsPoint& p1, const QgsPoint& p2, const double offset ) { double deltaX = p2.x() - p1.x(); double deltaY = p2.y() - p1.y(); return QgsPoint( p1.x() + deltaX * offset, p1.y() + deltaY * offset ); } QgsPolyline QgsGeometry::smoothLine( const QgsPolyline& polyline, const unsigned int iterations, const double offset ) const { QgsPolyline result = polyline; for ( unsigned int iteration = 0; iteration < iterations; ++iteration ) { QgsPolyline outputLine = QgsPolyline(); for ( int i = 0; i < result.count() - 1; i++ ) { const QgsPoint& p1 = result.at( i ); const QgsPoint& p2 = result.at( i + 1 ); outputLine << ( i == 0 ? result.at( i ) : interpolatePointOnLine( p1, p2, offset ) ); outputLine << ( i == result.count() - 2 ? result.at( i + 1 ) : interpolatePointOnLine( p1, p2, 1.0 - offset ) ); } result = outputLine; } return result; } QgsPolygon QgsGeometry::smoothPolygon( const QgsPolygon& polygon, const unsigned int iterations, const double offset ) const { QgsPolygon resultPoly; QgsPolygon::const_iterator ringIt = polygon.constBegin(); for ( ; ringIt != polygon.constEnd(); ++ringIt ) { QgsPolyline resultRing = *ringIt; for ( unsigned int iteration = 0; iteration < iterations; ++iteration ) { QgsPolyline outputRing = QgsPolyline(); for ( int i = 0; i < resultRing.count() - 1; ++i ) { const QgsPoint& p1 = resultRing.at( i ); const QgsPoint& p2 = resultRing.at( i + 1 ); outputRing << interpolatePointOnLine( p1, p2, offset ); outputRing << interpolatePointOnLine( p1, p2, 1.0 - offset ); } //close polygon outputRing << outputRing.at( 0 ); resultRing = outputRing; } resultPoly << resultRing; } return resultPoly; } QgsGeometry* QgsGeometry::centroid() const { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 0; try { return fromGeosGeom( GEOSGetCentroid_r( geosinit.ctxt, mGeos ) ); } CATCH_GEOS( 0 ) } QgsGeometry* QgsGeometry::pointOnSurface() const { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 0; try { return fromGeosGeom( GEOSPointOnSurface_r( geosinit.ctxt, mGeos ) ); } CATCH_GEOS( 0 ) } QgsGeometry* QgsGeometry::convexHull() const { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 0; try { return fromGeosGeom( GEOSConvexHull_r( geosinit.ctxt, mGeos ) ); } CATCH_GEOS( 0 ) } QgsGeometry* QgsGeometry::interpolate( double distance ) const { #if defined(GEOS_VERSION_MAJOR) && defined(GEOS_VERSION_MINOR) && \ ((GEOS_VERSION_MAJOR>3) || ((GEOS_VERSION_MAJOR==3) && (GEOS_VERSION_MINOR>=2))) if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return 0; try { return fromGeosGeom( GEOSInterpolate_r( geosinit.ctxt, mGeos, distance ) ); } CATCH_GEOS( 0 ) #else QgsMessageLog::logMessage( QObject::tr( "GEOS prior to 3.2 doesn't support GEOSInterpolate" ), QObject::tr( "GEOS" ) ); return NULL; #endif } QgsGeometry* QgsGeometry::intersection( const QgsGeometry* geometry ) const { if ( !geometry ) return NULL; if ( mDirtyGeos ) exportWkbToGeos(); if ( geometry->mDirtyGeos ) geometry->exportWkbToGeos(); if ( !mGeos || !geometry->mGeos ) return 0; try { return fromGeosGeom( GEOSIntersection_r( geosinit.ctxt, mGeos, geometry->mGeos ) ); } CATCH_GEOS( 0 ) } QgsGeometry* QgsGeometry::combine( const QgsGeometry *geometry ) const { if ( !geometry ) return NULL; if ( mDirtyGeos ) exportWkbToGeos(); if ( geometry->mDirtyGeos ) geometry->exportWkbToGeos(); if ( !mGeos || !geometry->mGeos ) return 0; try { GEOSGeometry* unionGeom = GEOSUnion_r( geosinit.ctxt, mGeos, geometry->mGeos ); if ( !unionGeom ) return 0; if ( type() == QGis::Line ) { GEOSGeometry* mergedGeom = GEOSLineMerge_r( geosinit.ctxt, unionGeom ); if ( mergedGeom ) { GEOSGeom_destroy_r( geosinit.ctxt, unionGeom ); unionGeom = mergedGeom; } } return fromGeosGeom( unionGeom ); } CATCH_GEOS( new QgsGeometry( *this ) ) //return this geometry if union not possible } QgsGeometry* QgsGeometry::difference( const QgsGeometry* geometry ) const { if ( !geometry ) return NULL; if ( mDirtyGeos ) exportWkbToGeos(); if ( geometry->mDirtyGeos ) geometry->exportWkbToGeos(); if ( !mGeos || !geometry->mGeos ) return 0; try { return fromGeosGeom( GEOSDifference_r( geosinit.ctxt, mGeos, geometry->mGeos ) ); } CATCH_GEOS( 0 ) } QgsGeometry* QgsGeometry::symDifference( const QgsGeometry* geometry ) const { if ( !geometry ) return NULL; if ( mDirtyGeos ) exportWkbToGeos(); if ( geometry->mDirtyGeos ) geometry->exportWkbToGeos(); if ( !mGeos || !geometry->mGeos ) return 0; try { return fromGeosGeom( GEOSSymDifference_r( geosinit.ctxt, mGeos, geometry->mGeos ) ); } CATCH_GEOS( 0 ) } QList<QgsGeometry*> QgsGeometry::asGeometryCollection() const { if ( mDirtyGeos ) exportWkbToGeos(); if ( !mGeos ) return QList<QgsGeometry*>(); int type = GEOSGeomTypeId_r( geosinit.ctxt, mGeos ); QgsDebugMsg( "geom type: " + QString::number( type ) ); QList<QgsGeometry*> geomCollection; if ( type != GEOS_MULTIPOINT && type != GEOS_MULTILINESTRING && type != GEOS_MULTIPOLYGON && type != GEOS_GEOMETRYCOLLECTION ) { // we have a single-part geometry - put there a copy of this one geomCollection.append( new QgsGeometry( *this ) ); return geomCollection; } int count = GEOSGetNumGeometries_r( geosinit.ctxt, mGeos ); QgsDebugMsg( "geom count: " + QString::number( count ) ); for ( int i = 0; i < count; ++i ) { const GEOSGeometry * geometry = GEOSGetGeometryN_r( geosinit.ctxt, mGeos, i ); geomCollection.append( fromGeosGeom( GEOSGeom_clone_r( geosinit.ctxt, geometry ) ) ); } return geomCollection; } QPointF QgsGeometry::asQPointF() const { QgsPoint point = asPoint(); return point.toQPointF(); } QPolygonF QgsGeometry::asQPolygonF() const { QPolygonF result; QgsPolyline polyline; QGis::WkbType type = wkbType(); if ( type == QGis::WKBLineString || type == QGis::WKBLineString25D ) { polyline = asPolyline(); } else if ( type == QGis::WKBPolygon || type == QGis::WKBPolygon25D ) { QgsPolygon polygon = asPolygon(); if ( polygon.size() < 1 ) return result; polyline = polygon.at( 0 ); } else { return result; } QgsPolyline::const_iterator lineIt = polyline.constBegin(); for ( ; lineIt != polyline.constEnd(); ++lineIt ) { result << lineIt->toQPointF(); } return result; } bool QgsGeometry::deleteRing( int ringNum, int partNum ) { if ( ringNum <= 0 || partNum < 0 ) return false; switch ( wkbType() ) { case QGis::WKBPolygon25D: case QGis::WKBPolygon: { if ( partNum != 0 ) return false; QgsPolygon polygon = asPolygon(); if ( ringNum >= polygon.count() ) return false; polygon.remove( ringNum ); QgsGeometry* g2 = QgsGeometry::fromPolygon( polygon ); *this = *g2; delete g2; return true; } case QGis::WKBMultiPolygon25D: case QGis::WKBMultiPolygon: { QgsMultiPolygon mpolygon = asMultiPolygon(); if ( partNum >= mpolygon.count() ) return false; if ( ringNum >= mpolygon[partNum].count() ) return false; mpolygon[partNum].remove( ringNum ); QgsGeometry* g2 = QgsGeometry::fromMultiPolygon( mpolygon ); *this = *g2; delete g2; return true; } default: return false; // only makes sense with polygons and multipolygons } } bool QgsGeometry::deletePart( int partNum ) { if ( partNum < 0 ) return false; switch ( wkbType() ) { case QGis::WKBMultiPoint25D: case QGis::WKBMultiPoint: { QgsMultiPoint mpoint = asMultiPoint(); if ( partNum >= mpoint.size() || mpoint.size() == 1 ) return false; mpoint.remove( partNum ); QgsGeometry* g2 = QgsGeometry::fromMultiPoint( mpoint ); *this = *g2; delete g2; break; } case QGis::WKBMultiLineString25D: case QGis::WKBMultiLineString: { QgsMultiPolyline mline = asMultiPolyline(); if ( partNum >= mline.size() || mline.size() == 1 ) return false; mline.remove( partNum ); QgsGeometry* g2 = QgsGeometry::fromMultiPolyline( mline ); *this = *g2; delete g2; break; } case QGis::WKBMultiPolygon25D: case QGis::WKBMultiPolygon: { QgsMultiPolygon mpolygon = asMultiPolygon(); if ( partNum >= mpolygon.size() || mpolygon.size() == 1 ) return false; mpolygon.remove( partNum ); QgsGeometry* g2 = QgsGeometry::fromMultiPolygon( mpolygon ); *this = *g2; delete g2; break; } default: // single part geometries are ignored return false; } return true; } /** Return union of several geometries - try to use unary union if available (GEOS >= 3.3) otherwise use a cascade of unions. * Takes ownership of passed geometries, returns a new instance */ static GEOSGeometry* _makeUnion( QList<GEOSGeometry*> geoms ) { #if defined(GEOS_VERSION_MAJOR) && defined(GEOS_VERSION_MINOR) && (((GEOS_VERSION_MAJOR==3) && (GEOS_VERSION_MINOR>=3)) || (GEOS_VERSION_MAJOR>3)) GEOSGeometry* geomCollection = 0; geomCollection = createGeosCollection( GEOS_GEOMETRYCOLLECTION, geoms.toVector() ); GEOSGeometry* geomUnion = GEOSUnaryUnion_r( geosinit.ctxt, geomCollection ); GEOSGeom_destroy_r( geosinit.ctxt, geomCollection ); return geomUnion; #else GEOSGeometry* geomCollection = geoms.takeFirst(); while ( !geoms.isEmpty() ) { GEOSGeometry* g = geoms.takeFirst(); GEOSGeometry* geomCollectionNew = GEOSUnion_r( geosinit.ctxt, geomCollection, g ); GEOSGeom_destroy_r( geosinit.ctxt, geomCollection ); GEOSGeom_destroy_r( geosinit.ctxt, g ); geomCollection = geomCollectionNew; } return geomCollection; #endif } int QgsGeometry::avoidIntersections( QMap<QgsVectorLayer*, QSet< QgsFeatureId > > ignoreFeatures ) { int returnValue = 0; //check if g has polygon type if ( type() != QGis::Polygon ) return 1; QGis::WkbType geomTypeBeforeModification = wkbType(); //read avoid intersections list from project properties bool listReadOk; QStringList avoidIntersectionsList = QgsProject::instance()->readListEntry( "Digitizing", "/AvoidIntersectionsList", QStringList(), &listReadOk ); if ( !listReadOk ) return true; //no intersections stored in project does not mean error QList<GEOSGeometry*> nearGeometries; //go through list, convert each layer to vector layer and call QgsVectorLayer::removePolygonIntersections for each QgsVectorLayer* currentLayer = 0; QStringList::const_iterator aIt = avoidIntersectionsList.constBegin(); for ( ; aIt != avoidIntersectionsList.constEnd(); ++aIt ) { currentLayer = dynamic_cast<QgsVectorLayer*>( QgsMapLayerRegistry::instance()->mapLayer( *aIt ) ); if ( currentLayer ) { QgsFeatureIds ignoreIds; QMap<QgsVectorLayer*, QSet<qint64> >::const_iterator ignoreIt = ignoreFeatures.find( currentLayer ); if ( ignoreIt != ignoreFeatures.constEnd() ) ignoreIds = ignoreIt.value(); QgsFeatureIterator fi = currentLayer->getFeatures( QgsFeatureRequest( boundingBox() ) .setFlags( QgsFeatureRequest::ExactIntersect ) .setSubsetOfAttributes( QgsAttributeList() ) ); QgsFeature f; while ( fi.nextFeature( f ) ) { if ( ignoreIds.contains( f.id() ) ) continue; if ( !f.geometry() ) continue; nearGeometries << GEOSGeom_clone_r( geosinit.ctxt, f.geometry()->asGeos() ); } } } if ( nearGeometries.isEmpty() ) return 0; GEOSGeometry* nearGeometriesUnion = 0; GEOSGeometry* geomWithoutIntersections = 0; try { nearGeometriesUnion = _makeUnion( nearGeometries ); geomWithoutIntersections = GEOSDifference_r( geosinit.ctxt, asGeos(), nearGeometriesUnion ); fromGeos( geomWithoutIntersections ); GEOSGeom_destroy_r( geosinit.ctxt, nearGeometriesUnion ); } catch ( GEOSException &e ) { if ( nearGeometriesUnion ) GEOSGeom_destroy_r( geosinit.ctxt, nearGeometriesUnion ); if ( geomWithoutIntersections ) GEOSGeom_destroy_r( geosinit.ctxt, geomWithoutIntersections ); QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); return 3; } //make sure the geometry still has the same type (e.g. no change from polygon to multipolygon) if ( wkbType() != geomTypeBeforeModification ) return 2; return returnValue; } void QgsGeometry::validateGeometry( QList<Error> &errors ) { QgsGeometryValidator::validateGeometry( this, errors ); } bool QgsGeometry::isGeosValid() const { try { const GEOSGeometry *g = asGeos(); if ( !g ) return false; return GEOSisValid_r( geosinit.ctxt, g ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); return false; } } bool QgsGeometry::isGeosEqual( const QgsGeometry &g ) const { return geosRelOp( GEOSEquals_r, this, &g ); } bool QgsGeometry::isGeosEmpty() const { try { const GEOSGeometry *g = asGeos(); if ( !g ) return false; return GEOSisEmpty_r( geosinit.ctxt, g ); } catch ( GEOSException &e ) { QgsMessageLog::logMessage( QObject::tr( "Exception: %1" ).arg( e.what() ), QObject::tr( "GEOS" ) ); return false; } } double QgsGeometry::leftOf( double x, double y, double& x1, double& y1, double& x2, double& y2 ) const { double f1 = x - x1; double f2 = y2 - y1; double f3 = y - y1; double f4 = x2 - x1; return f1*f2 - f3*f4; } QgsGeometry* QgsGeometry::convertToPoint( bool destMultipart ) const { switch ( type() ) { case QGis::Point: { bool srcIsMultipart = isMultipart(); if (( destMultipart && srcIsMultipart ) || ( !destMultipart && !srcIsMultipart ) ) { // return a copy of the same geom return new QgsGeometry( *this ); } if ( destMultipart ) { // layer is multipart => make a multipoint with a single point return fromMultiPoint( QgsMultiPoint() << asPoint() ); } else { // destination is singlepart => make a single part if possible QgsMultiPoint multiPoint = asMultiPoint(); if ( multiPoint.count() == 1 ) { return fromPoint( multiPoint[0] ); } } return 0; } case QGis::Line: { // only possible if destination is multipart if ( !destMultipart ) return 0; // input geometry is multipart if ( isMultipart() ) { QgsMultiPolyline multiLine = asMultiPolyline(); QgsMultiPoint multiPoint; for ( QgsMultiPolyline::const_iterator multiLineIt = multiLine.constBegin(); multiLineIt != multiLine.constEnd(); ++multiLineIt ) for ( QgsPolyline::const_iterator lineIt = ( *multiLineIt ).constBegin(); lineIt != ( *multiLineIt ).constEnd(); ++lineIt ) multiPoint << *lineIt; return fromMultiPoint( multiPoint ); } // input geometry is not multipart: copy directly the line into a multipoint else { QgsPolyline line = asPolyline(); if ( !line.isEmpty() ) return fromMultiPoint( line ); } return 0; } case QGis::Polygon: { // can only transform if destination is multipoint if ( !destMultipart ) return 0; // input geometry is multipart: make a multipoint from multipolygon if ( isMultipart() ) { QgsMultiPolygon multiPolygon = asMultiPolygon(); QgsMultiPoint multiPoint; for ( QgsMultiPolygon::const_iterator polygonIt = multiPolygon.constBegin(); polygonIt != multiPolygon.constEnd(); ++polygonIt ) for ( QgsMultiPolyline::const_iterator multiLineIt = ( *polygonIt ).constBegin(); multiLineIt != ( *polygonIt ).constEnd(); ++multiLineIt ) for ( QgsPolyline::const_iterator lineIt = ( *multiLineIt ).constBegin(); lineIt != ( *multiLineIt ).constEnd(); ++lineIt ) multiPoint << *lineIt; return fromMultiPoint( multiPoint ); } // input geometry is not multipart: make a multipoint from polygon else { QgsPolygon polygon = asPolygon(); QgsMultiPoint multiPoint; for ( QgsMultiPolyline::const_iterator multiLineIt = polygon.constBegin(); multiLineIt != polygon.constEnd(); ++multiLineIt ) for ( QgsPolyline::const_iterator lineIt = ( *multiLineIt ).constBegin(); lineIt != ( *multiLineIt ).constEnd(); ++lineIt ) multiPoint << *lineIt; return fromMultiPoint( multiPoint ); } } default: return 0; } } QgsGeometry* QgsGeometry::convertToLine( bool destMultipart ) const { switch ( type() ) { case QGis::Point: { if ( !isMultipart() ) return 0; QgsMultiPoint multiPoint = asMultiPoint(); if ( multiPoint.count() < 2 ) return 0; if ( destMultipart ) return fromMultiPolyline( QgsMultiPolyline() << multiPoint ); else return fromPolyline( multiPoint ); } case QGis::Line: { bool srcIsMultipart = isMultipart(); if (( destMultipart && srcIsMultipart ) || ( !destMultipart && ! srcIsMultipart ) ) { // return a copy of the same geom return new QgsGeometry( *this ); } if ( destMultipart ) { // destination is multipart => makes a multipoint with a single line QgsPolyline line = asPolyline(); if ( !line.isEmpty() ) return fromMultiPolyline( QgsMultiPolyline() << line ); } else { // destination is singlepart => make a single part if possible QgsMultiPolyline multiLine = asMultiPolyline(); if ( multiLine.count() == 1 ) return fromPolyline( multiLine[0] ); } return 0; } case QGis::Polygon: { // input geometry is multipolygon if ( isMultipart() ) { QgsMultiPolygon multiPolygon = asMultiPolygon(); QgsMultiPolyline multiLine; for ( QgsMultiPolygon::const_iterator polygonIt = multiPolygon.constBegin(); polygonIt != multiPolygon.constEnd(); ++polygonIt ) for ( QgsMultiPolyline::const_iterator multiLineIt = ( *polygonIt ).constBegin(); multiLineIt != ( *polygonIt ).constEnd(); ++multiLineIt ) multiLine << *multiLineIt; if ( destMultipart ) { // destination is multipart return fromMultiPolyline( multiLine ); } else if ( multiLine.count() == 1 ) { // destination is singlepart => make a single part if possible return fromPolyline( multiLine[0] ); } } // input geometry is single polygon else { QgsPolygon polygon = asPolygon(); // if polygon has rings if ( polygon.count() > 1 ) { // cannot fit a polygon with rings in a single line layer // TODO: would it be better to remove rings? if ( destMultipart ) { QgsPolygon polygon = asPolygon(); QgsMultiPolyline multiLine; for ( QgsMultiPolyline::const_iterator multiLineIt = polygon.constBegin(); multiLineIt != polygon.constEnd(); ++multiLineIt ) multiLine << *multiLineIt; return fromMultiPolyline( multiLine ); } } // no rings else if ( polygon.count() == 1 ) { if ( destMultipart ) { return fromMultiPolyline( polygon ); } else { return fromPolyline( polygon[0] ); } } } return 0; } default: return 0; } } QgsGeometry* QgsGeometry::convertToPolygon( bool destMultipart ) const { switch ( type() ) { case QGis::Point: { if ( !isMultipart() ) return 0; QgsMultiPoint multiPoint = asMultiPoint(); if ( multiPoint.count() < 3 ) return 0; if ( multiPoint.last() != multiPoint.first() ) multiPoint << multiPoint.first(); QgsPolygon polygon = QgsPolygon() << multiPoint; if ( destMultipart ) return fromMultiPolygon( QgsMultiPolygon() << polygon ); else return fromPolygon( polygon ); } case QGis::Line: { // input geometry is multiline if ( isMultipart() ) { QgsMultiPolyline multiLine = asMultiPolyline(); QgsMultiPolygon multiPolygon; for ( QgsMultiPolyline::iterator multiLineIt = multiLine.begin(); multiLineIt != multiLine.end(); ++multiLineIt ) { // do not create polygon for a 1 segment line if (( *multiLineIt ).count() < 3 ) return 0; if (( *multiLineIt ).count() == 3 && ( *multiLineIt ).first() == ( *multiLineIt ).last() ) return 0; // add closing node if (( *multiLineIt ).first() != ( *multiLineIt ).last() ) *multiLineIt << ( *multiLineIt ).first(); multiPolygon << ( QgsPolygon() << *multiLineIt ); } // check that polygons were inserted if ( !multiPolygon.isEmpty() ) { if ( destMultipart ) { return fromMultiPolygon( multiPolygon ); } else if ( multiPolygon.count() == 1 ) { // destination is singlepart => make a single part if possible return fromPolygon( multiPolygon[0] ); } } } // input geometry is single line else { QgsPolyline line = asPolyline(); // do not create polygon for a 1 segment line if ( line.count() < 3 ) return 0; if ( line.count() == 3 && line.first() == line.last() ) return 0; // add closing node if ( line.first() != line.last() ) line << line.first(); // destination is multipart if ( destMultipart ) { return fromMultiPolygon( QgsMultiPolygon() << ( QgsPolygon() << line ) ); } else { return fromPolygon( QgsPolygon() << line ); } } return 0; } case QGis::Polygon: { bool srcIsMultipart = isMultipart(); if (( destMultipart && srcIsMultipart ) || ( !destMultipart && ! srcIsMultipart ) ) { // return a copy of the same geom return new QgsGeometry( *this ); } if ( destMultipart ) { // destination is multipart => makes a multipoint with a single polygon QgsPolygon polygon = asPolygon(); if ( !polygon.isEmpty() ) return fromMultiPolygon( QgsMultiPolygon() << polygon ); } else { QgsMultiPolygon multiPolygon = asMultiPolygon(); if ( multiPolygon.count() == 1 ) { // destination is singlepart => make a single part if possible return fromPolygon( multiPolygon[0] ); } } return 0; } default: return 0; } } QgsGeometry *QgsGeometry::unaryUnion( const QList<QgsGeometry *> &geometryList ) { QList<GEOSGeometry*> geoms; foreach ( QgsGeometry* g, geometryList ) { geoms.append( GEOSGeom_clone_r( geosinit.ctxt, g->asGeos() ) ); } GEOSGeometry *geomUnion = _makeUnion( geoms ); QgsGeometry *ret = new QgsGeometry(); ret->fromGeos( geomUnion ); return ret; } bool QgsGeometry::compare( const QgsPolyline &p1, const QgsPolyline &p2, double epsilon ) { if ( p1.count() != p2.count() ) return false; for ( int i = 0; i < p1.count(); ++i ) { if ( !p1.at( i ).compare( p2.at( i ), epsilon ) ) return false; } return true; } bool QgsGeometry::compare( const QgsPolygon &p1, const QgsPolygon &p2, double epsilon ) { if ( p1.count() != p2.count() ) return false; for ( int i = 0; i < p1.count(); ++i ) { if ( !QgsGeometry::compare( p1.at( i ), p2.at( i ), epsilon ) ) return false; } return true; } bool QgsGeometry::compare( const QgsMultiPolygon &p1, const QgsMultiPolygon &p2, double epsilon ) { if ( p1.count() != p2.count() ) return false; for ( int i = 0; i < p1.count(); ++i ) { if ( !QgsGeometry::compare( p1.at( i ), p2.at( i ), epsilon ) ) return false; } return true; }<|fim▁end|>
}
<|file_name|>account_reconcile.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time from osv import fields, osv from tools.translate import _ class account_move_line_reconcile(osv.osv_memory): """ Account move line reconcile wizard, it checks for the write off the reconcile entry or directly reconcile. """ _name = 'account.move.line.reconcile' _description = 'Account move line reconcile' _columns = { 'trans_nbr': fields.integer('# of Transaction', readonly=True), 'credit': fields.float('Credit amount', readonly=True), 'debit': fields.float('Debit amount', readonly=True), 'writeoff': fields.float('Write-Off amount', readonly=True), } def default_get(self, cr, uid, fields, context=None): res = super(account_move_line_reconcile, self).default_get(cr, uid, fields, context=context) data = self.trans_rec_get(cr, uid, context['active_ids'], context) if 'trans_nbr' in fields: res.update({'trans_nbr':data['trans_nbr']}) if 'credit' in fields: res.update({'credit':data['credit']}) if 'debit' in fields: res.update({'debit':data['debit']}) if 'writeoff' in fields: res.update({'writeoff':data['writeoff']}) return res def trans_rec_get(self, cr, uid, ids, context=None): account_move_line_obj = self.pool.get('account.move.line') if context is None: context = {} credit = debit = 0 account_id = False count = 0 for line in account_move_line_obj.browse(cr, uid, context['active_ids'], context=context): if not line.reconcile_id and not line.reconcile_id.id: count += 1 credit += line.credit debit += line.debit account_id = line.account_id.id return {'trans_nbr': count, 'account_id': account_id, 'credit': credit, 'debit': debit, 'writeoff': debit - credit} def trans_rec_addendum_writeoff(self, cr, uid, ids, context=None): return self.pool.get('account.move.line.reconcile.writeoff').trans_rec_addendum(cr, uid, ids, context) def trans_rec_reconcile_partial_reconcile(self, cr, uid, ids, context=None): return self.pool.get('account.move.line.reconcile.writeoff').trans_rec_reconcile_partial(cr, uid, ids, context) def trans_rec_reconcile_full(self, cr, uid, ids, context=None): account_move_line_obj = self.pool.get('account.move.line') period_obj = self.pool.get('account.period') date = False period_id = False journal_id= False account_id = False if context is None: context = {} date = time.strftime('%Y-%m-%d') ids = period_obj.find(cr, uid, dt=date, context=context) if ids: period_id = ids[0] #stop the reconciliation process by partner (manual reconciliation) only if there is nothing more to reconcile for this partner if 'active_ids' in context and context['active_ids']: tmp_ml_id = account_move_line_obj.browse(cr, uid, context['active_ids'], context)[0] partner_id = tmp_ml_id.partner_id and tmp_ml_id.partner_id.id or False debit_ml_ids = account_move_line_obj.search(cr, uid, [('partner_id', '=', partner_id), ('account_id.reconcile', '=', True), ('reconcile_id', '=', False), ('debit', '>', 0)], context=context) credit_ml_ids = account_move_line_obj.search(cr, uid, [('partner_id', '=', partner_id), ('account_id.reconcile', '=', True), ('reconcile_id', '=', False), ('credit', '>', 0)], context=context) for ml_id in context['active_ids']: if ml_id in debit_ml_ids: debit_ml_ids.remove(ml_id) if ml_id in credit_ml_ids: credit_ml_ids.remove(ml_id) if not debit_ml_ids and credit_ml_ids: context.update({'stop_reconcile': True}) account_move_line_obj.reconcile(cr, uid, context['active_ids'], 'manual', account_id, period_id, journal_id, context=context) return {'type': 'ir.actions.act_window_close'} account_move_line_reconcile() class account_move_line_reconcile_writeoff(osv.osv_memory):<|fim▁hole|> """ _name = 'account.move.line.reconcile.writeoff' _description = 'Account move line reconcile (writeoff)' _columns = { 'journal_id': fields.many2one('account.journal','Write-Off Journal', required=True), 'writeoff_acc_id': fields.many2one('account.account','Write-Off account', required=True), 'date_p': fields.date('Date'), 'comment': fields.char('Comment', size= 64, required=True), 'analytic_id': fields.many2one('account.analytic.account', 'Analytic Account', domain=[('parent_id', '!=', False)]), } _defaults = { 'date_p': lambda *a: time.strftime('%Y-%m-%d'), 'comment': 'Write-off', } def trans_rec_addendum(self, cr, uid, ids, context=None): mod_obj = self.pool.get('ir.model.data') if context is None: context = {} model_data_ids = mod_obj.search(cr, uid,[('model','=','ir.ui.view'),('name','=','account_move_line_reconcile_writeoff')], context=context) resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id'] return { 'name': _('Reconcile Writeoff'), 'context': context, 'view_type': 'form', 'view_mode': 'form', 'res_model': 'account.move.line.reconcile.writeoff', 'views': [(resource_id,'form')], 'type': 'ir.actions.act_window', 'target': 'new', } def trans_rec_reconcile_partial(self, cr, uid, ids, context=None): account_move_line_obj = self.pool.get('account.move.line') if context is None: context = {} account_move_line_obj.reconcile_partial(cr, uid, context['active_ids'], 'manual', context=context) return {'type': 'ir.actions.act_window_close'} def trans_rec_reconcile(self, cr, uid, ids, context=None): account_move_line_obj = self.pool.get('account.move.line') period_obj = self.pool.get('account.period') if context is None: context = {} data = self.read(cr, uid, ids,context=context)[0] account_id = data['writeoff_acc_id'][0] context['date_p'] = data['date_p'] journal_id = data['journal_id'][0] context['comment'] = data['comment'] if data['analytic_id']: context['analytic_id'] = data['analytic_id'][0] if context['date_p']: date = context['date_p'] ids = period_obj.find(cr, uid, dt=date, context=context) if ids: period_id = ids[0] context.update({'stop_reconcile': True}) account_move_line_obj.reconcile(cr, uid, context['active_ids'], 'manual', account_id, period_id, journal_id, context=context) return {'type': 'ir.actions.act_window_close'} account_move_line_reconcile_writeoff() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
""" It opens the write off wizard form, in that user can define the journal, account, analytic account for reconcile
<|file_name|>background.tsx<|end_file_name|><|fim▁begin|><|fim▁hole|> const fragShader = ` precision mediump; `; class Background extends React.Component<any, any> { } export { Background };<|fim▁end|>
import * as React from 'react';
<|file_name|>pygame.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import with_statement from __future__ import division from __future__ import print_function ''' Collected utilities for pygame It is difficult to write pixels directly in python. There's some way to get a framebuffer back from Tk, but it is cumberosme. The module pygame supports sending pixel buffers, which is wrapped for convneinece in this module. example usage import neurotools.graphics.pygame as npg import time import numpy as np import pygame K = 128 screen = npg.start(K,K,'Image data') dt = 1/20 wait_til = time.time() + dt print('Animating..') for i in neurotools.tools.progress_bar(range(100)): t = time.time() if t<wait_til: time.sleep(wait_til-t) wait_til = t + dt npg.draw_array(screen, np.random.rand(K,K,3)) pygame.quit() ''' import sys import numpy as np try: import pygame as pg except: print('pygame package is missing; it is obsolete so this is not unusual') print('pygame graphics will not work') pg = None def enable_vsync(): if sys.platform != 'darwin': return try: import ctypes import ctypes.util ogl = ctypes.cdll.LoadLibrary(ctypes.util.find_library("OpenGL")) # set v to 1 to enable vsync, 0 to disable vsync v = ctypes.c_int(1) ogl.CGLSetParameter(ogl.CGLGetCurrentContext(), ctypes.c_int(222), ctypes.pointer(v)) except: print("Unable to set vsync mode, using driver defaults") def start(W,H,name='untitled'): # Get things going pg.quit() pg.init() enable_vsync() window = pg.display.set_mode((W,H)) pg.display.set_caption(name) return window def draw_array(screen,rgbdata,doshow=True): ''' Send array data to a PyGame window. PyGame is BRG order which is unusual -- reorder it. Parameters ---------- screen : object Object returned by neurotools.graphics.pygame.start rgbdata : RGB image data with color values in [0,1] ''' # Cast to int rgbdata = np.int32(rgbdata*255) # clip bytes to 0..255 range rgbdata[rgbdata<0]=0 rgbdata[rgbdata>255]=255 # get color dimension if len(rgbdata.shape)==3: w,h,d = rgbdata.shape else: w,h = rgbdata.shape d=1 # repack color data in screen format draw = np.zeros((w,h,4),'uint8') if d==1: draw[...,0]=rgbdata draw[...,1]=rgbdata draw[...,2]=rgbdata draw[...,3]=255 # alpha channel if d==3: draw[...,:3]=rgbdata[...,::-1] draw[...,-1]=255 # alpha channel if d==4: draw[...,:3]=rgbdata[...,-2::-1] draw[...,-1]=rgbdata[...,-1] # get surface and copy data to sceeen surface = pg.Surface((w,h)) numpy_surface = np.frombuffer(surface.get_buffer())<|fim▁hole|> del numpy_surface screen.blit(surface,(0,0)) if doshow: pg.display.update()<|fim▁end|>
numpy_surface[...] = np.frombuffer(draw)
<|file_name|>slice-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that slicing syntax gives errors if we have not implemented the trait. struct Foo; fn main() {<|fim▁hole|> x[..Foo]; //~ ERROR cannot take a slice of a value with type `Foo` x[Foo..Foo]; //~ ERROR cannot take a slice of a value with type `Foo` x[mut]; //~ ERROR cannot take a mutable slice of a value with type `Foo` x[mut Foo..]; //~ ERROR cannot take a mutable slice of a value with type `Foo` x[mut ..Foo]; //~ ERROR cannot take a mutable slice of a value with type `Foo` x[mut Foo..Foo]; //~ ERROR cannot take a mutable slice of a value with type `Foo` }<|fim▁end|>
let x = Foo; x[]; //~ ERROR cannot take a slice of a value with type `Foo` x[Foo..]; //~ ERROR cannot take a slice of a value with type `Foo`
<|file_name|>dev_runner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import re import shutil import signal import stat import subprocess import sys import time from spinnaker.configurator import InstallationParameters from spinnaker.fetch import AWS_METADATA_URL from spinnaker.fetch import GOOGLE_METADATA_URL from spinnaker.fetch import GOOGLE_INSTANCE_METADATA_URL from spinnaker.fetch import is_aws_instance from spinnaker.fetch import is_google_instance from spinnaker.fetch import check_fetch from spinnaker.fetch import fetch from spinnaker.yaml_util import YamlBindings from spinnaker.validate_configuration import ValidateConfig from spinnaker import spinnaker_runner def populate_aws_yml(content): aws_dict = {'enabled': False} if is_aws_instance(): zone = (check_fetch(AWS_METADATA_URL + '/placement/availability-zone') .content) aws_dict['enabled'] = 'true' aws_dict['defaultRegion'] = zone[:-1] elif os.path.exists(os.path.join(os.environ['HOME'], '.aws/credentials')): aws_dict['enabled'] = 'true' aws_dict['defaultRegion'] = 'us-east-1' bindings = YamlBindings() bindings.import_dict({'providers': {'aws': aws_dict}}) content = bindings.transform_yaml_source(content, 'providers.aws.enabled') content = bindings.transform_yaml_source(content, 'providers.aws.defaultRegion') return content def populate_google_yml(content): credentials = {'project': '', 'jsonPath': ''} google_dict = {'enabled': False, 'defaultRegion': 'us-central1', 'defaultZone': 'us-central1-f',} google_dict['primaryCredentials'] = credentials if is_google_instance(): zone = os.path.basename( check_fetch(GOOGLE_INSTANCE_METADATA_URL + '/zone', google=True).content) google_dict['enabled'] = 'true' google_dict['defaultRegion'] = zone[:-2] google_dict['defaultZone'] = zone credentials['project'] = check_fetch( GOOGLE_METADATA_URL + '/project/project-id', google=True).content bindings = YamlBindings() bindings.import_dict({'providers': {'google': google_dict}}) content = bindings.transform_yaml_source(content, 'providers.google.enabled') content = bindings.transform_yaml_source( content, 'providers.google.defaultRegion') content = bindings.transform_yaml_source( content, 'providers.google.defaultZone') content = bindings.transform_yaml_source( content, 'providers.google.primaryCredentials.project') content = bindings.transform_yaml_source( content, 'providers.google.primaryCredentials.jsonPath') return content class DevInstallationParameters(InstallationParameters): """Specialization of the normal production InstallationParameters. This is a developer deployment where the paths are setup to run directly out of this repository rather than a standard system installation. Also, custom configuration parameters come from the $HOME/.spinnaker rather than the normal installation location of /opt/spinnaker/config. """ DEV_SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__)) SUBSYSTEM_ROOT_DIR = os.getcwd() USER_CONFIG_DIR = os.path.join(os.environ['HOME'], '.spinnaker') LOG_DIR = os.path.join(SUBSYSTEM_ROOT_DIR, 'logs') SPINNAKER_INSTALL_DIR = os.path.abspath( os.path.join(DEV_SCRIPT_DIR, '..')) INSTALLED_CONFIG_DIR = os.path.abspath( os.path.join(DEV_SCRIPT_DIR, '../config')) UTILITY_SCRIPT_DIR = os.path.abspath( os.path.join(DEV_SCRIPT_DIR, '../runtime')) EXTERNAL_DEPENDENCY_SCRIPT_DIR = os.path.abspath( os.path.join(DEV_SCRIPT_DIR, '../runtime')) DECK_INSTALL_DIR = os.path.join(SUBSYSTEM_ROOT_DIR, 'deck') HACK_DECK_SETTINGS_FILENAME = 'settings.js' DECK_PORT = 9000 class DevRunner(spinnaker_runner.Runner): """Specialization of the normal spinnaker runner for development use. This class has different behaviors than the normal runner. It follows similar heuristics for launching and stopping jobs, however, the details differ in fundamental ways. * The subsystems are run from their source (using gradle) and will attempt to rebuild before running. * Spinnaker will be reconfigured on each invocation. The runner will display all the events to the subsystem error logs to the console for as long as this script is running. When the script terminates, the console will no longer show the error log, but the processes will remain running, and continue logging to the logs directory. """ @staticmethod def maybe_generate_clean_user_local(): """Generate a spinnaker-local.yml file without environment variables refs""" user_dir = DevInstallationParameters.USER_CONFIG_DIR user_config_path = os.path.join(user_dir, 'spinnaker-local.yml') if os.path.exists(user_config_path): return if not os.path.exists(user_dir): os.mkdir(user_dir) with open('{config_dir}/default-spinnaker-local.yml'.format( config_dir=DevInstallationParameters.INSTALLED_CONFIG_DIR), 'r') as f: content = f.read() content = populate_aws_yml(content) content = populate_google_yml(content) with open(user_config_path, 'w') as f: f.write(content) os.chmod(user_config_path, 0600) def __init__(self, installation_parameters=None): self.maybe_generate_clean_user_local() installation = installation_parameters or DevInstallationParameters super(DevRunner, self).__init__(installation) def start_subsystem(self, subsystem, environ=None): """Starts the specified subsystem. Args: subsystem [string]: The repository name of the subsystem to run. """ print 'Starting {subsystem}'.format(subsystem=subsystem) command = os.path.join( self.installation.SUBSYSTEM_ROOT_DIR, subsystem, 'start_dev.sh') return self.run_daemon(command, [command], environ=environ) def tail_error_logs(self): """Start a background tail job of all the component error logs.""" log_dir = self.installation.LOG_DIR try: os.makedirs(log_dir) except OSError: pass tail_jobs = [] for subsystem in self.get_all_subsystem_names(): path = os.path.join(log_dir, subsystem + '.err') open(path, 'w').close() tail_jobs.append(self.start_tail(path)) return tail_jobs def get_deck_pid(self): """Return the process id for deck, or None.""" program='node ./node_modules/webpack-dev-server/bin/webpack-dev-server.js' stdout, stderr = subprocess.Popen( 'ps -fwwwC node', stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, close_fds=True).communicate() match = re.search('(?m)^[^ ]+ +([0-9]+) .* {program}'.format( program=program), stdout) return int(match.group(1)) if match else None def start_deck(self): """Start subprocess for deck.""" pid = self.get_deck_pid() if pid: print 'Deck is already running as pid={pid}'.format(pid=pid) return pid path = os.path.join(self.installation.SUBSYSTEM_ROOT_DIR, 'deck/start_dev.sh') return self.run_daemon(path, [path]) def stop_deck(self): """Stop subprocess for deck.""" pid = self.get_deck_pid() if pid: print 'Terminating deck in pid={pid}'.format(pid=pid) os.kill(pid, signal.SIGTERM) def start_all(self, options):<|fim▁hole|> The subsystems are in forked processes disassociated from this, so will continue running even after this process exists. Only the stderr logging to console will stop once this process is terminated. However, the logging will still continue into the LOG_DIR. """ ValidateConfig(self.configurator).check_validate() self.configurator.update_deck_settings() ignore_tail_jobs = self.tail_error_logs() super(DevRunner, self).start_all(options) deck_port = self.installation.DECK_PORT print 'Waiting for deck to start on port {port}'.format(port=deck_port) # Tail the log file while we wait and run. # But the log file might not yet exist if deck hasn't started yet. # So wait for the log file to exist before starting to tail it. # Deck cant be ready yet if it hasn't started yet anyway. deck_log_path = os.path.join(self.installation.LOG_DIR, 'deck.log') while not os.path.exists(deck_log_path): time.sleep(0.1) ignore_tail_jobs.append(self.start_tail(deck_log_path)) # Don't just wait for port to be ready, but for deck to respond # because it takes a long time to startup once port is ready. while True: code, ignore = fetch('http://localhost:{port}/'.format(port=deck_port)) if code == 200: break else: time.sleep(0.1) print """Spinnaker is now ready on port {port}. You can ^C (ctrl-c) to finish the script, which will stop emitting errors. Spinnaker will continue until you run ../spinnaker/dev/stop_dev.sh """.format(port=deck_port) while True: time.sleep(3600) def program_to_subsystem(self, program): return program def subsystem_to_program(self, subsystem): return subsystem if __name__ == '__main__': if not os.path.exists('deck'): sys.stderr.write('This script needs to be run from the root of' ' your build directory.\n') sys.exit(-1) DevRunner.main()<|fim▁end|>
"""Starts all the components then logs stderr to the console forever.
<|file_name|>planets4.py<|end_file_name|><|fim▁begin|>f = open("data/planetsc.txt", "r") earth = 0 for line in f: planet = line.strip().lower() if planet[0] == "#": continue earth += 1 if planet == "earth":<|fim▁hole|><|fim▁end|>
break print "Earth is planet #%d" % earth
<|file_name|>num.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Integer and floating-point number formatting // FIXME: #6220 Implement floating point formatting #![allow(unsigned_negation)] use fmt; use iter::IteratorExt; use num::{Int, cast}; use slice::SliceExt; use str; /// A type that represents a specific radix #[doc(hidden)] trait GenericRadix { /// The number of digits. fn base(&self) -> u8; /// A radix-specific prefix string. fn prefix(&self) -> &'static str { "" } /// Converts an integer to corresponding radix digit.<|fim▁hole|> /// Format an integer using the radix using a formatter. fn fmt_int<T: Int>(&self, mut x: T, f: &mut fmt::Formatter) -> fmt::Result { // The radix can be as low as 2, so we need a buffer of at least 64 // characters for a base 2 number. let zero = Int::zero(); let is_positive = x >= zero; let mut buf = [0; 64]; let mut curr = buf.len(); let base = cast(self.base()).unwrap(); if is_positive { // Accumulate each digit of the number from the least significant // to the most significant figure. for byte in buf.iter_mut().rev() { let n = x % base; // Get the current place value. x = x / base; // Deaccumulate the number. *byte = self.digit(cast(n).unwrap()); // Store the digit in the buffer. curr -= 1; if x == zero { break }; // No more digits left to accumulate. } } else { // Do the same as above, but accounting for two's complement. for byte in buf.iter_mut().rev() { let n = zero - (x % base); // Get the current place value. x = x / base; // Deaccumulate the number. *byte = self.digit(cast(n).unwrap()); // Store the digit in the buffer. curr -= 1; if x == zero { break }; // No more digits left to accumulate. } } let buf = unsafe { str::from_utf8_unchecked(&buf[curr..]) }; f.pad_integral(is_positive, self.prefix(), buf) } } /// A binary (base 2) radix #[derive(Clone, PartialEq)] struct Binary; /// An octal (base 8) radix #[derive(Clone, PartialEq)] struct Octal; /// A decimal (base 10) radix #[derive(Clone, PartialEq)] struct Decimal; /// A hexadecimal (base 16) radix, formatted with lower-case characters #[derive(Clone, PartialEq)] struct LowerHex; /// A hexadecimal (base 16) radix, formatted with upper-case characters #[derive(Clone, PartialEq)] struct UpperHex; macro_rules! radix { ($T:ident, $base:expr, $prefix:expr, $($x:pat => $conv:expr),+) => { impl GenericRadix for $T { fn base(&self) -> u8 { $base } fn prefix(&self) -> &'static str { $prefix } fn digit(&self, x: u8) -> u8 { match x { $($x => $conv,)+ x => panic!("number not in the range 0..{}: {}", self.base() - 1, x), } } } } } radix! { Binary, 2, "0b", x @ 0 ... 2 => b'0' + x } radix! { Octal, 8, "0o", x @ 0 ... 7 => b'0' + x } radix! { Decimal, 10, "", x @ 0 ... 9 => b'0' + x } radix! { LowerHex, 16, "0x", x @ 0 ... 9 => b'0' + x, x @ 10 ... 15 => b'a' + (x - 10) } radix! { UpperHex, 16, "0x", x @ 0 ... 9 => b'0' + x, x @ 10 ... 15 => b'A' + (x - 10) } /// A radix with in the range of `2..36`. #[derive(Clone, Copy, PartialEq)] #[unstable(feature = "core", reason = "may be renamed or move to a different module")] pub struct Radix { base: u8, } impl Radix { fn new(base: u8) -> Radix { assert!(2 <= base && base <= 36, "the base must be in the range of 2..36: {}", base); Radix { base: base } } } impl GenericRadix for Radix { fn base(&self) -> u8 { self.base } fn digit(&self, x: u8) -> u8 { match x { x @ 0 ... 9 => b'0' + x, x if x < self.base() => b'a' + (x - 10), x => panic!("number not in the range 0..{}: {}", self.base() - 1, x), } } } /// A helper type for formatting radixes. #[unstable(feature = "core", reason = "may be renamed or move to a different module")] #[derive(Copy)] pub struct RadixFmt<T, R>(T, R); /// Constructs a radix formatter in the range of `2..36`. /// /// # Examples /// /// ``` /// use std::fmt::radix; /// assert_eq!(format!("{}", radix(55, 36)), "1j".to_string()); /// ``` #[unstable(feature = "core", reason = "may be renamed or move to a different module")] pub fn radix<T>(x: T, base: u8) -> RadixFmt<T, Radix> { RadixFmt(x, Radix::new(base)) } macro_rules! radix_fmt { ($T:ty as $U:ty, $fmt:ident) => { #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for RadixFmt<$T, Radix> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Display for RadixFmt<$T, Radix> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { RadixFmt(ref x, radix) => radix.$fmt(*x as $U, f) } } } } } macro_rules! int_base { ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => { #[stable(feature = "rust1", since = "1.0.0")] impl fmt::$Trait for $T { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { $Radix.fmt_int(*self as $U, f) } } } } macro_rules! debug { ($T:ident) => { #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for $T { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } } } macro_rules! integer { ($Int:ident, $Uint:ident) => { int_base! { Display for $Int as $Int -> Decimal } int_base! { Binary for $Int as $Uint -> Binary } int_base! { Octal for $Int as $Uint -> Octal } int_base! { LowerHex for $Int as $Uint -> LowerHex } int_base! { UpperHex for $Int as $Uint -> UpperHex } radix_fmt! { $Int as $Int, fmt_int } debug! { $Int } int_base! { Display for $Uint as $Uint -> Decimal } int_base! { Binary for $Uint as $Uint -> Binary } int_base! { Octal for $Uint as $Uint -> Octal } int_base! { LowerHex for $Uint as $Uint -> LowerHex } int_base! { UpperHex for $Uint as $Uint -> UpperHex } radix_fmt! { $Uint as $Uint, fmt_int } debug! { $Uint } } } integer! { isize, usize } integer! { i8, u8 } integer! { i16, u16 } integer! { i32, u32 } integer! { i64, u64 }<|fim▁end|>
fn digit(&self, x: u8) -> u8;
<|file_name|>derive_object_with_raw_idents.rs<|end_file_name|><|fim▁begin|>use juniper::{ execute, graphql_object, graphql_value, EmptyMutation, EmptySubscription, GraphQLInputObject, RootNode, Value, Variables, }; pub struct Query; #[graphql_object] impl Query { fn r#type(r#fn: MyInputType) -> Vec<String> { let _ = r#fn; unimplemented!() } } #[derive(GraphQLInputObject, Debug, PartialEq)] struct MyInputType { r#trait: String, } #[tokio::test] async fn supports_raw_idents_in_types_and_args() { let doc = r#" { __type(name: "Query") { fields { name args { name } } } } "#; let value = run_type_info_query(&doc).await; assert_eq!( value, graphql_value!( { "__type": { "fields": [ { "name": "type", "args": [ { "name": "fn" } ] } ] } } ), ); } #[tokio::test] async fn supports_raw_idents_in_fields_of_input_types() { let doc = r#" { __type(name: "MyInputType") { inputFields { name } } } "#; let value = run_type_info_query(&doc).await; assert_eq!( value, graphql_value!( { "__type": { "inputFields": [ { "name": "trait", } ] } } ), ); } async fn run_type_info_query(doc: &str) -> Value { let schema = RootNode::new( Query, EmptyMutation::<()>::new(), EmptySubscription::<()>::new(), ); let (result, errs) = execute(doc, None, &schema, &Variables::new(), &()) .await .expect("Execution failed"); assert_eq!(errs, []); <|fim▁hole|>}<|fim▁end|>
println!("Result: {:#?}", result); result
<|file_name|>types.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1 import ( "time" "k8s.io/kubernetes/pkg/api/unversioned" apiv1 "k8s.io/kubernetes/pkg/api/v1" client "k8s.io/kubernetes/pkg/client/unversioned" ) type Policy struct { unversioned.TypeMeta `json:",inline"` // Holds the information to configure the fit predicate functions Predicates []PredicatePolicy `json:"predicates"` // Holds the information to configure the priority functions Priorities []PriorityPolicy `json:"priorities"` // Holds the information to communicate with the extender(s) ExtenderConfigs []ExtenderConfig `json:"extenders"` } type PredicatePolicy struct { // Identifier of the predicate policy // For a custom predicate, the name can be user-defined // For the Kubernetes provided predicates, the name is the identifier of the pre-defined predicate Name string `json:"name"` // Holds the parameters to configure the given predicate Argument *PredicateArgument `json:"argument"` } type PriorityPolicy struct { // Identifier of the priority policy // For a custom priority, the name can be user-defined // For the Kubernetes provided priority functions, the name is the identifier of the pre-defined priority function Name string `json:"name"` // The numeric multiplier for the node scores that the priority function generates // The weight should be non-zero and can be a positive or a negative integer Weight int `json:"weight"` // Holds the parameters to configure the given priority function Argument *PriorityArgument `json:"argument"` } // Represents the arguments that the different types of predicates take // Only one of its members may be specified type PredicateArgument struct { // The predicate that provides affinity for pods belonging to a service // It uses a label to identify nodes that belong to the same "group" ServiceAffinity *ServiceAffinity `json:"serviceAffinity"` // The predicate that checks whether a particular node has a certain label // defined or not, regardless of value LabelsPresence *LabelsPresence `json:"labelsPresence"` } // Represents the arguments that the different types of priorities take. // Only one of its members may be specified<|fim▁hole|> // The priority function that checks whether a particular node has a certain label // defined or not, regardless of value LabelPreference *LabelPreference `json:"labelPreference"` } // Holds the parameters that are used to configure the corresponding predicate type ServiceAffinity struct { // The list of labels that identify node "groups" // All of the labels should match for the node to be considered a fit for hosting the pod Labels []string `json:"labels"` } // Holds the parameters that are used to configure the corresponding predicate type LabelsPresence struct { // The list of labels that identify node "groups" // All of the labels should be either present (or absent) for the node to be considered a fit for hosting the pod Labels []string `json:"labels"` // The boolean flag that indicates whether the labels should be present or absent from the node Presence bool `json:"presence"` } // Holds the parameters that are used to configure the corresponding priority function type ServiceAntiAffinity struct { // Used to identify node "groups" Label string `json:"label"` } // Holds the parameters that are used to configure the corresponding priority function type LabelPreference struct { // Used to identify node "groups" Label string `json:"label"` // This is a boolean flag // If true, higher priority is given to nodes that have the label // If false, higher priority is given to nodes that do not have the label Presence bool `json:"presence"` } // Holds the parameters used to communicate with the extender. If a verb is unspecified/empty, // it is assumed that the extender chose not to provide that extension. type ExtenderConfig struct { // URLPrefix at which the extender is available URLPrefix string `json:"urlPrefix"` // Verb for the filter call, empty if not supported. This verb is appended to the URLPrefix when issuing the filter call to extender. FilterVerb string `json:"filterVerb,omitempty"` // Verb for the prioritize call, empty if not supported. This verb is appended to the URLPrefix when issuing the prioritize call to extender. PrioritizeVerb string `json:"prioritizeVerb,omitempty"` // The numeric multiplier for the node scores that the prioritize call generates. // The weight should be a positive integer Weight int `json:"weight,omitempty"` // EnableHttps specifies whether https should be used to communicate with the extender EnableHttps bool `json:"enableHttps,omitempty"` // TLSConfig specifies the transport layer security config TLSConfig *client.TLSClientConfig `json:"tlsConfig,omitempty"` // HTTPTimeout specifies the timeout duration for a call to the extender. Filter timeout fails the scheduling of the pod. Prioritize // timeout is ignored, k8s/other extenders priorities are used to select the node. HTTPTimeout time.Duration `json:"httpTimeout,omitempty"` } // ExtenderArgs represents the arguments needed by the extender to filter/prioritize // nodes for a pod. type ExtenderArgs struct { // Pod being scheduled Pod apiv1.Pod `json:"pod"` // List of candidate nodes where the pod can be scheduled Nodes apiv1.NodeList `json:"nodes"` } // ExtenderFilterResult represents the results of a filter call to an extender type ExtenderFilterResult struct { // Filtered set of nodes where the pod can be scheduled Nodes apiv1.NodeList `json:"nodes,omitempty"` // Error message indicating failure Error string `json:"error,omitempty"` } // HostPriority represents the priority of scheduling to a particular host, higher priority is better. type HostPriority struct { // Name of the host Host string `json:"host"` // Score associated with the host Score int `json:"score"` } type HostPriorityList []HostPriority func (h HostPriorityList) Len() int { return len(h) } func (h HostPriorityList) Less(i, j int) bool { if h[i].Score == h[j].Score { return h[i].Host < h[j].Host } return h[i].Score < h[j].Score } func (h HostPriorityList) Swap(i, j int) { h[i], h[j] = h[j], h[i] }<|fim▁end|>
type PriorityArgument struct { // The priority function that ensures a good spread (anti-affinity) for pods belonging to a service // It uses a label to identify nodes that belong to the same "group" ServiceAntiAffinity *ServiceAntiAffinity `json:"serviceAntiAffinity"`
<|file_name|>test_feedexport.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import import os import csv import json from io import BytesIO import tempfile import shutil from six.moves.urllib.parse import urlparse from zope.interface.verify import verifyObject from twisted.trial import unittest from twisted.internet import defer from scrapy.crawler import CrawlerRunner from scrapy.settings import Settings from tests.mockserver import MockServer from w3lib.url import path_to_file_uri import scrapy from scrapy.extensions.feedexport import ( IFeedStorage, FileFeedStorage, FTPFeedStorage, S3FeedStorage, StdoutFeedStorage, BlockingFeedStorage) from scrapy.utils.test import assert_aws_environ, get_s3_content_and_delete, get_crawler from scrapy.utils.python import to_native_str class FileFeedStorageTest(unittest.TestCase): def test_store_file_uri(self): path = os.path.abspath(self.mktemp()) uri = path_to_file_uri(path) return self._assert_stores(FileFeedStorage(uri), path) def test_store_file_uri_makedirs(self): path = os.path.abspath(self.mktemp()) path = os.path.join(path, 'more', 'paths', 'file.txt') uri = path_to_file_uri(path) return self._assert_stores(FileFeedStorage(uri), path) def test_store_direct_path(self): path = os.path.abspath(self.mktemp()) return self._assert_stores(FileFeedStorage(path), path) def test_store_direct_path_relative(self): path = self.mktemp() return self._assert_stores(FileFeedStorage(path), path) def test_interface(self): path = self.mktemp() st = FileFeedStorage(path) verifyObject(IFeedStorage, st) @defer.inlineCallbacks def _assert_stores(self, storage, path): spider = scrapy.Spider("default") file = storage.open(spider) file.write(b"content") yield storage.store(file) self.assertTrue(os.path.exists(path)) with open(path, 'rb') as fp: self.assertEqual(fp.read(), b"content") class FTPFeedStorageTest(unittest.TestCase): def test_store(self): uri = os.environ.get('FEEDTEST_FTP_URI') path = os.environ.get('FEEDTEST_FTP_PATH') if not (uri and path): raise unittest.SkipTest("No FTP server available for testing") st = FTPFeedStorage(uri) verifyObject(IFeedStorage, st) return self._assert_stores(st, path) @defer.inlineCallbacks def _assert_stores(self, storage, path): spider = scrapy.Spider("default") file = storage.open(spider) file.write(b"content") yield storage.store(file) self.assertTrue(os.path.exists(path)) with open(path, 'rb') as fp: self.assertEqual(fp.read(), b"content") # again, to check s3 objects are overwritten yield storage.store(BytesIO(b"new content")) with open(path, 'rb') as fp: self.assertEqual(fp.read(), b"new content") class BlockingFeedStorageTest(unittest.TestCase): def get_test_spider(self, settings=None): class TestSpider(scrapy.Spider): name = 'test_spider' crawler = get_crawler(settings_dict=settings) spider = TestSpider.from_crawler(crawler) return spider def test_default_temp_dir(self): b = BlockingFeedStorage() tmp = b.open(self.get_test_spider()) tmp_path = os.path.dirname(tmp.name) self.assertEqual(tmp_path, tempfile.gettempdir()) def test_temp_file(self): b = BlockingFeedStorage() tests_path = os.path.dirname(os.path.abspath(__file__)) spider = self.get_test_spider({'FEED_TEMPDIR': tests_path}) tmp = b.open(spider) tmp_path = os.path.dirname(tmp.name) self.assertEqual(tmp_path, tests_path) def test_invalid_folder(self): b = BlockingFeedStorage() tests_path = os.path.dirname(os.path.abspath(__file__)) invalid_path = os.path.join(tests_path, 'invalid_path') spider = self.get_test_spider({'FEED_TEMPDIR': invalid_path}) self.assertRaises(OSError, b.open, spider=spider) class S3FeedStorageTest(unittest.TestCase): @defer.inlineCallbacks def test_store(self): assert_aws_environ() uri = os.environ.get('S3_TEST_FILE_URI') if not uri: raise unittest.SkipTest("No S3 URI available for testing") storage = S3FeedStorage(uri) verifyObject(IFeedStorage, storage) file = storage.open(scrapy.Spider("default")) expected_content = b"content: \xe2\x98\x83" file.write(expected_content) yield storage.store(file) u = urlparse(uri) content = get_s3_content_and_delete(u.hostname, u.path[1:]) self.assertEqual(content, expected_content) class StdoutFeedStorageTest(unittest.TestCase): @defer.inlineCallbacks def test_store(self): out = BytesIO() storage = StdoutFeedStorage('stdout:', _stdout=out) file = storage.open(scrapy.Spider("default")) file.write(b"content") yield storage.store(file) self.assertEqual(out.getvalue(), b"content") class FeedExportTest(unittest.TestCase): class MyItem(scrapy.Item): foo = scrapy.Field() egg = scrapy.Field() baz = scrapy.Field() @defer.inlineCallbacks def run_and_export(self, spider_cls, settings=None): """ Run spider with specified settings; return exported data. """ tmpdir = tempfile.mkdtemp() res_name = tmpdir + '/res' defaults = { 'FEED_URI': 'file://' + res_name, 'FEED_FORMAT': 'csv', } defaults.update(settings or {}) try: with MockServer() as s: runner = CrawlerRunner(Settings(defaults)) yield runner.crawl(spider_cls) with open(res_name, 'rb') as f: defer.returnValue(f.read()) finally: shutil.rmtree(tmpdir) @defer.inlineCallbacks def exported_data(self, items, settings): """ Return exported data which a spider yielding ``items`` would return. """ class TestSpider(scrapy.Spider): name = 'testspider' start_urls = ['http://localhost:8998/'] def parse(self, response): for item in items: yield item data = yield self.run_and_export(TestSpider, settings) defer.returnValue(data) @defer.inlineCallbacks def exported_no_data(self, settings): """ Return exported data which a spider yielding no ``items`` would return. """ class TestSpider(scrapy.Spider): name = 'testspider' start_urls = ['http://localhost:8998/'] def parse(self, response): pass <|fim▁hole|> defer.returnValue(data) @defer.inlineCallbacks def assertExportedCsv(self, items, header, rows, settings=None, ordered=True): settings = settings or {} settings.update({'FEED_FORMAT': 'csv'}) data = yield self.exported_data(items, settings) reader = csv.DictReader(to_native_str(data).splitlines()) got_rows = list(reader) if ordered: self.assertEqual(reader.fieldnames, header) else: self.assertEqual(set(reader.fieldnames), set(header)) self.assertEqual(rows, got_rows) @defer.inlineCallbacks def assertExportedJsonLines(self, items, rows, settings=None): settings = settings or {} settings.update({'FEED_FORMAT': 'jl'}) data = yield self.exported_data(items, settings) parsed = [json.loads(to_native_str(line)) for line in data.splitlines()] rows = [{k: v for k, v in row.items() if v} for row in rows] self.assertEqual(rows, parsed) @defer.inlineCallbacks def assertExportedXml(self, items, rows, settings=None): settings = settings or {} settings.update({'FEED_FORMAT': 'xml'}) data = yield self.exported_data(items, settings) rows = [{k: v for k, v in row.items() if v} for row in rows] import lxml.etree root = lxml.etree.fromstring(data) got_rows = [{e.tag: e.text for e in it} for it in root.findall('item')] self.assertEqual(rows, got_rows) def _load_until_eof(self, data, load_func): bytes_output = BytesIO(data) result = [] while True: try: result.append(load_func(bytes_output)) except EOFError: break return result @defer.inlineCallbacks def assertExportedPickle(self, items, rows, settings=None): settings = settings or {} settings.update({'FEED_FORMAT': 'pickle'}) data = yield self.exported_data(items, settings) expected = [{k: v for k, v in row.items() if v} for row in rows] import pickle result = self._load_until_eof(data, load_func=pickle.load) self.assertEqual(expected, result) @defer.inlineCallbacks def assertExportedMarshal(self, items, rows, settings=None): settings = settings or {} settings.update({'FEED_FORMAT': 'marshal'}) data = yield self.exported_data(items, settings) expected = [{k: v for k, v in row.items() if v} for row in rows] import marshal result = self._load_until_eof(data, load_func=marshal.load) self.assertEqual(expected, result) @defer.inlineCallbacks def assertExported(self, items, header, rows, settings=None, ordered=True): yield self.assertExportedCsv(items, header, rows, settings, ordered) yield self.assertExportedJsonLines(items, rows, settings) yield self.assertExportedXml(items, rows, settings) yield self.assertExportedPickle(items, rows, settings) @defer.inlineCallbacks def test_export_items(self): # feed exporters use field names from Item items = [ self.MyItem({'foo': 'bar1', 'egg': 'spam1'}), self.MyItem({'foo': 'bar2', 'egg': 'spam2', 'baz': 'quux2'}), ] rows = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': 'spam2', 'foo': 'bar2', 'baz': 'quux2'} ] header = self.MyItem.fields.keys() yield self.assertExported(items, header, rows, ordered=False) @defer.inlineCallbacks def test_export_no_items_not_store_empty(self): formats = ('json', 'jsonlines', 'xml', 'csv',) for fmt in formats: settings = {'FEED_FORMAT': fmt} data = yield self.exported_no_data(settings) self.assertEqual(data, b'') @defer.inlineCallbacks def test_export_no_items_store_empty(self): formats = ( ('json', b'[\n\n]'), ('jsonlines', b''), ('xml', b'<?xml version="1.0" encoding="utf-8"?>\n<items></items>'), ('csv', b''), ) for fmt, expctd in formats: settings = {'FEED_FORMAT': fmt, 'FEED_STORE_EMPTY': True} data = yield self.exported_no_data(settings) self.assertEqual(data, expctd) @defer.inlineCallbacks def test_export_multiple_item_classes(self): class MyItem2(scrapy.Item): foo = scrapy.Field() hello = scrapy.Field() items = [ self.MyItem({'foo': 'bar1', 'egg': 'spam1'}), MyItem2({'hello': 'world2', 'foo': 'bar2'}), self.MyItem({'foo': 'bar3', 'egg': 'spam3', 'baz': 'quux3'}), {'hello': 'world4', 'egg': 'spam4'}, ] # by default, Scrapy uses fields of the first Item for CSV and # all fields for JSON Lines header = self.MyItem.fields.keys() rows_csv = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': '', 'foo': 'bar2', 'baz': ''}, {'egg': 'spam3', 'foo': 'bar3', 'baz': 'quux3'}, {'egg': 'spam4', 'foo': '', 'baz': ''}, ] rows_jl = [dict(row) for row in items] yield self.assertExportedCsv(items, header, rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl) # edge case: FEED_EXPORT_FIELDS==[] means the same as default None settings = {'FEED_EXPORT_FIELDS': []} yield self.assertExportedCsv(items, header, rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl, settings) # it is possible to override fields using FEED_EXPORT_FIELDS header = ["foo", "baz", "hello"] settings = {'FEED_EXPORT_FIELDS': header} rows = [ {'foo': 'bar1', 'baz': '', 'hello': ''}, {'foo': 'bar2', 'baz': '', 'hello': 'world2'}, {'foo': 'bar3', 'baz': 'quux3', 'hello': ''}, {'foo': '', 'baz': '', 'hello': 'world4'}, ] yield self.assertExported(items, header, rows, settings=settings, ordered=True) @defer.inlineCallbacks def test_export_dicts(self): # When dicts are used, only keys from the first row are used as # a header for CSV, and all fields are used for JSON Lines. items = [ {'foo': 'bar', 'egg': 'spam'}, {'foo': 'bar', 'egg': 'spam', 'baz': 'quux'}, ] rows_csv = [ {'egg': 'spam', 'foo': 'bar'}, {'egg': 'spam', 'foo': 'bar'} ] rows_jl = items yield self.assertExportedCsv(items, ['egg', 'foo'], rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl) @defer.inlineCallbacks def test_export_feed_export_fields(self): # FEED_EXPORT_FIELDS option allows to order export fields # and to select a subset of fields to export, both for Items and dicts. for item_cls in [self.MyItem, dict]: items = [ item_cls({'foo': 'bar1', 'egg': 'spam1'}), item_cls({'foo': 'bar2', 'egg': 'spam2', 'baz': 'quux2'}), ] # export all columns settings = {'FEED_EXPORT_FIELDS': 'foo,baz,egg'} rows = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': 'spam2', 'foo': 'bar2', 'baz': 'quux2'} ] yield self.assertExported(items, ['foo', 'baz', 'egg'], rows, settings=settings, ordered=True) # export a subset of columns settings = {'FEED_EXPORT_FIELDS': 'egg,baz'} rows = [ {'egg': 'spam1', 'baz': ''}, {'egg': 'spam2', 'baz': 'quux2'} ] yield self.assertExported(items, ['egg', 'baz'], rows, settings=settings, ordered=True) @defer.inlineCallbacks def test_export_encoding(self): items = [dict({'foo': u'Test\xd6'})] header = ['foo'] formats = { 'json': u'[\n{"foo": "Test\\u00d6"}\n]'.encode('utf-8'), 'jsonlines': u'{"foo": "Test\\u00d6"}\n'.encode('utf-8'), 'xml': u'<?xml version="1.0" encoding="utf-8"?>\n<items><item><foo>Test\xd6</foo></item></items>'.encode('utf-8'), 'csv': u'foo\r\nTest\xd6\r\n'.encode('utf-8'), } for format in formats: settings = {'FEED_FORMAT': format} data = yield self.exported_data(items, settings) self.assertEqual(formats[format], data) formats = { 'json': u'[\n{"foo": "Test\xd6"}\n]'.encode('latin-1'), 'jsonlines': u'{"foo": "Test\xd6"}\n'.encode('latin-1'), 'xml': u'<?xml version="1.0" encoding="latin-1"?>\n<items><item><foo>Test\xd6</foo></item></items>'.encode('latin-1'), 'csv': u'foo\r\nTest\xd6\r\n'.encode('latin-1'), } for format in formats: settings = {'FEED_FORMAT': format, 'FEED_EXPORT_ENCODING': 'latin-1'} data = yield self.exported_data(items, settings) self.assertEqual(formats[format], data)<|fim▁end|>
data = yield self.run_and_export(TestSpider, settings)
<|file_name|>HttpError.spec.js<|end_file_name|><|fim▁begin|>const describe = require("mocha").describe; const it = require("mocha").it; const assert = require("chai").assert; const HttpError = require("./HttpError"); describe("HttpError", function () { it("should be instance of Error", function () { const testSubject = new HttpError();<|fim▁hole|> }); });<|fim▁end|>
assert.isOk(testSubject instanceof Error);
<|file_name|>abortable.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use super::assert_stream; use crate::stream::{AbortHandle, Abortable}; use crate::Stream; /// Creates a new `Abortable` stream and an `AbortHandle` which can be used to stop it. /// /// This function is a convenient (but less flexible) alternative to calling /// `AbortHandle::new` and `Abortable::new` manually. /// /// This function is only available when the `std` or `alloc` feature of this /// library is activated, and it is activated by default. pub fn abortable<St>(stream: St) -> (Abortable<St>, AbortHandle) where St: Stream, { let (handle, reg) = AbortHandle::new_pair(); let abortable = assert_stream::<St::Item, _>(Abortable::new(stream, reg)); (abortable, handle) }<|fim▁end|>
<|file_name|>phone.spec.ts<|end_file_name|><|fim▁begin|>describe('validation phone', function () { var $scope, $compile, element; beforeEach(angular.mock.module('dd.ui.validation.phone')); beforeEach(inject(function ($rootScope, _$compile_) { $scope = $rootScope; $compile = _$compile_; })); describe('general phone', function(){ beforeEach(function() { element = $compile('<form name="forma"><input type="text" name="phone" ng-model="phone" phone /></form>')($scope); }); it('should mark valid empty', function(){<|fim▁hole|> expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid when changed from invalid to empty', function(){ $scope.phone = '+123'; $scope.$digest(); $scope.phone = ''; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid null', function(){ $scope.phone = null; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid 13 numbers starting with plus', function(){ $scope.phone = '+1234567890123'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); expect($scope.forma.phone.$error).toEqual({}); }); it('should mark invalid 13 numbers wo plus sign', function(){ $scope.phone = '1234567890123'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); expect($scope.forma.phone.$error.phone).toBe(true); }); it('should mark invalid 9 numbers starting with plus', function(){ $scope.phone = '+123456789'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); }); it('should mark invalid 15 numbers starting with plus', function(){ $scope.phone = '+123456789012345'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); }); it('should mark invalid 13 numbers with one letter', function(){ $scope.phone = '+1234567890123a'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); }); }); describe('phoneCountryCode', function(){ beforeEach(function() { element = $compile('<form name="forma"><input type="text" name="phone" ng-model="phone" phone-country-code /></form>')($scope); }); it('should mark valid empty', function(){ $scope.phone = ''; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid when changed from invalid to empty', function(){ $scope.phone = 'aaa'; $scope.$digest(); $scope.phone = ''; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid null', function(){ $scope.phone = null; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid 3 numbers starting with plus', function(){ $scope.phone = '+123'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); expect($scope.forma.phone.$error).toEqual({}); }); it('should mark valid 1 number starting with plus', function(){ $scope.phone = '+1'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark invalid 3 numbers wo plus sign', function(){ $scope.phone = '123'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); expect($scope.forma.phone.$error.phoneCountryCode).toBe(true); }); it('should mark invalid 4 numbers starting with plus', function(){ $scope.phone = '+1234'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); }); it('should mark invalid 2 numbers with one letter', function(){ $scope.phone = '+12a'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); }); }); describe('phoneWoCountryCode', function(){ beforeEach(function() { element = $compile('<form name="forma"><input type="text" name="phone" ng-model="phone" phone-wo-country-code /></form>')($scope); }); it('should mark valid empty', function(){ $scope.phone = ''; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid when changed from invalid to empty', function(){ $scope.phone = 'aaa'; $scope.$digest(); $scope.phone = ''; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid null', function(){ $scope.phone = null; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); }); it('should mark valid 7 numbers', function(){ $scope.phone = '1234567'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(false); expect($scope.forma.phone.$error).toEqual({}); }); it('should mark invalid 7 numbers with plus sign', function(){ $scope.phone = '+1234567'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); expect($scope.forma.phone.$error.phoneWoCountryCode).toBe(true); }); it('should mark invalid 14 numbers', function(){ $scope.phone = '12345678901234'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); }); it('should mark invalid 7 numbers with one letter', function(){ $scope.phone = '1234567a'; $scope.$digest(); expect($scope.forma.phone.$invalid).toBe(true); }); }); });<|fim▁end|>
$scope.phone = ''; $scope.$digest();
<|file_name|>meta.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- class AutocompleteMeta: """ Simple meta class to allow the model to define aspects of the autocomplete. :var name: used for the named url<|fim▁hole|> * if ``permissions`` ``False`` (default) no authentication is checked. * if ``permissions`` ``True`` then request.user must be authenticated. * if ``permissions`` ``string`` then request.user must have the permission defined by ``string``. * if ``permissions`` ``iter`` then request.user must have all the permissionis defined in the ``iter`` See :class:`django_autocomplete.views.AutocompleteView` for more clarification. For example as a simple object: >>> from django_autocomplete.meta import AutocompleteMeta >>> class TestModel(object): ... autocomplete = AutocompleteMeta( ... name='silly', ... path='api/filter/silly', ... ) The model autocomplete configures the model for use: >>> m = TestModel() >>> m.autocomplete <django_autocomplete.meta.AutocompleteMeta object at 0x...> >>> m.autocomplete.path 'api/filter/silly' >>> m.autocomplete.name 'silly' >>> m.autocomplete.follow_fks True >>> m.autocomplete.fields [] """ name = '' path = '' fields = [] permissions = None follow_fks = True def __init__(self, autocomplete=None, **kwargs): if autocomplete: autocomplete_attrs = autocomplete.__dict__ else: autocomplete_attrs = kwargs for attr in self.__class__.__dict__: if attr in autocomplete_attrs: self.__dict__[attr] = autocomplete_attrs[attr]<|fim▁end|>
:var path: the path to autocomplete view :var follow_fks: when searching should ForeignKey fields be followed. :var fields: list of fields, if empty then all searchable fields are used :var permissions: bool, string or iter
<|file_name|>tasks.go<|end_file_name|><|fim▁begin|>package store import ( "strconv" "strings" "github.com/docker/swarmkit/api" "github.com/docker/swarmkit/api/naming" "github.com/docker/swarmkit/manager/state" memdb "github.com/hashicorp/go-memdb" ) const tableTask = "task" func init() { register(ObjectStoreConfig{ Name: tableTask, Table: &memdb.TableSchema{ Name: tableTask, Indexes: map[string]*memdb.IndexSchema{ indexID: { Name: indexID, Unique: true, Indexer: taskIndexerByID{}, }, indexName: { Name: indexName, AllowMissing: true, Indexer: taskIndexerByName{}, }, indexServiceID: { Name: indexServiceID, AllowMissing: true, Indexer: taskIndexerByServiceID{}, }, indexNodeID: { Name: indexNodeID, AllowMissing: true, Indexer: taskIndexerByNodeID{}, }, indexSlot: { Name: indexSlot, AllowMissing: true, Indexer: taskIndexerBySlot{}, }, indexDesiredState: { Name: indexDesiredState, Indexer: taskIndexerByDesiredState{}, }, indexNetwork: { Name: indexNetwork, AllowMissing: true, Indexer: taskIndexerByNetwork{}, }, indexSecret: { Name: indexSecret, AllowMissing: true, Indexer: taskIndexerBySecret{}, }, }, }, Save: func(tx ReadTx, snapshot *api.StoreSnapshot) error { var err error snapshot.Tasks, err = FindTasks(tx, All) return err }, Restore: func(tx Tx, snapshot *api.StoreSnapshot) error { tasks, err := FindTasks(tx, All) if err != nil { return err } for _, t := range tasks { if err := DeleteTask(tx, t.ID); err != nil { return err } } for _, t := range snapshot.Tasks { if err := CreateTask(tx, t); err != nil { return err } } return nil }, ApplyStoreAction: func(tx Tx, sa *api.StoreAction) error { switch v := sa.Target.(type) { case *api.StoreAction_Task: obj := v.Task switch sa.Action { case api.StoreActionKindCreate: return CreateTask(tx, obj) case api.StoreActionKindUpdate: return UpdateTask(tx, obj) case api.StoreActionKindRemove: return DeleteTask(tx, obj.ID) } } return errUnknownStoreAction }, NewStoreAction: func(c state.Event) (api.StoreAction, error) { var sa api.StoreAction switch v := c.(type) { case state.EventCreateTask: sa.Action = api.StoreActionKindCreate sa.Target = &api.StoreAction_Task{ Task: v.Task, } case state.EventUpdateTask: sa.Action = api.StoreActionKindUpdate<|fim▁hole|> case state.EventDeleteTask: sa.Action = api.StoreActionKindRemove sa.Target = &api.StoreAction_Task{ Task: v.Task, } default: return api.StoreAction{}, errUnknownStoreAction } return sa, nil }, }) } type taskEntry struct { *api.Task } func (t taskEntry) ID() string { return t.Task.ID } func (t taskEntry) Meta() api.Meta { return t.Task.Meta } func (t taskEntry) SetMeta(meta api.Meta) { t.Task.Meta = meta } func (t taskEntry) Copy() Object { return taskEntry{t.Task.Copy()} } func (t taskEntry) EventCreate() state.Event { return state.EventCreateTask{Task: t.Task} } func (t taskEntry) EventUpdate() state.Event { return state.EventUpdateTask{Task: t.Task} } func (t taskEntry) EventDelete() state.Event { return state.EventDeleteTask{Task: t.Task} } // CreateTask adds a new task to the store. // Returns ErrExist if the ID is already taken. func CreateTask(tx Tx, t *api.Task) error { return tx.create(tableTask, taskEntry{t}) } // UpdateTask updates an existing task in the store. // Returns ErrNotExist if the node doesn't exist. func UpdateTask(tx Tx, t *api.Task) error { return tx.update(tableTask, taskEntry{t}) } // DeleteTask removes a task from the store. // Returns ErrNotExist if the task doesn't exist. func DeleteTask(tx Tx, id string) error { return tx.delete(tableTask, id) } // GetTask looks up a task by ID. // Returns nil if the task doesn't exist. func GetTask(tx ReadTx, id string) *api.Task { t := tx.get(tableTask, id) if t == nil { return nil } return t.(taskEntry).Task } // FindTasks selects a set of tasks and returns them. func FindTasks(tx ReadTx, by By) ([]*api.Task, error) { checkType := func(by By) error { switch by.(type) { case byName, byNamePrefix, byIDPrefix, byDesiredState, byNode, byService, bySlot, byReferencedNetworkID, byReferencedSecretID: return nil default: return ErrInvalidFindBy } } taskList := []*api.Task{} appendResult := func(o Object) { taskList = append(taskList, o.(taskEntry).Task) } err := tx.find(tableTask, by, checkType, appendResult) return taskList, err } type taskIndexerByID struct{} func (ti taskIndexerByID) FromArgs(args ...interface{}) ([]byte, error) { return fromArgs(args...) } func (ti taskIndexerByID) FromObject(obj interface{}) (bool, []byte, error) { t, ok := obj.(taskEntry) if !ok { panic("unexpected type passed to FromObject") } // Add the null character as a terminator val := t.Task.ID + "\x00" return true, []byte(val), nil } func (ti taskIndexerByID) PrefixFromArgs(args ...interface{}) ([]byte, error) { return prefixFromArgs(args...) } type taskIndexerByName struct{} func (ti taskIndexerByName) FromArgs(args ...interface{}) ([]byte, error) { return fromArgs(args...) } func (ti taskIndexerByName) FromObject(obj interface{}) (bool, []byte, error) { t, ok := obj.(taskEntry) if !ok { panic("unexpected type passed to FromObject") } name := naming.Task(t.Task) // Add the null character as a terminator return true, []byte(strings.ToLower(name) + "\x00"), nil } func (ti taskIndexerByName) PrefixFromArgs(args ...interface{}) ([]byte, error) { return prefixFromArgs(args...) } type taskIndexerByServiceID struct{} func (ti taskIndexerByServiceID) FromArgs(args ...interface{}) ([]byte, error) { return fromArgs(args...) } func (ti taskIndexerByServiceID) FromObject(obj interface{}) (bool, []byte, error) { t, ok := obj.(taskEntry) if !ok { panic("unexpected type passed to FromObject") } // Add the null character as a terminator val := t.ServiceID + "\x00" return true, []byte(val), nil } type taskIndexerByNodeID struct{} func (ti taskIndexerByNodeID) FromArgs(args ...interface{}) ([]byte, error) { return fromArgs(args...) } func (ti taskIndexerByNodeID) FromObject(obj interface{}) (bool, []byte, error) { t, ok := obj.(taskEntry) if !ok { panic("unexpected type passed to FromObject") } // Add the null character as a terminator val := t.NodeID + "\x00" return true, []byte(val), nil } type taskIndexerBySlot struct{} func (ti taskIndexerBySlot) FromArgs(args ...interface{}) ([]byte, error) { return fromArgs(args...) } func (ti taskIndexerBySlot) FromObject(obj interface{}) (bool, []byte, error) { t, ok := obj.(taskEntry) if !ok { panic("unexpected type passed to FromObject") } // Add the null character as a terminator val := t.ServiceID + "\x00" + strconv.FormatUint(t.Slot, 10) + "\x00" return true, []byte(val), nil } type taskIndexerByDesiredState struct{} func (ti taskIndexerByDesiredState) FromArgs(args ...interface{}) ([]byte, error) { return fromArgs(args...) } func (ti taskIndexerByDesiredState) FromObject(obj interface{}) (bool, []byte, error) { t, ok := obj.(taskEntry) if !ok { panic("unexpected type passed to FromObject") } // Add the null character as a terminator return true, []byte(strconv.FormatInt(int64(t.DesiredState), 10) + "\x00"), nil } type taskIndexerByNetwork struct{} func (ti taskIndexerByNetwork) FromArgs(args ...interface{}) ([]byte, error) { return fromArgs(args...) } func (ti taskIndexerByNetwork) FromObject(obj interface{}) (bool, [][]byte, error) { t, ok := obj.(taskEntry) if !ok { panic("unexpected type passed to FromObject") } var networkIDs [][]byte for _, na := range t.Spec.Networks { // Add the null character as a terminator networkIDs = append(networkIDs, []byte(na.Target+"\x00")) } return len(networkIDs) != 0, networkIDs, nil } type taskIndexerBySecret struct{} func (ti taskIndexerBySecret) FromArgs(args ...interface{}) ([]byte, error) { return fromArgs(args...) } func (ti taskIndexerBySecret) FromObject(obj interface{}) (bool, [][]byte, error) { t, ok := obj.(taskEntry) if !ok { panic("unexpected type passed to FromObject") } container := t.Spec.GetContainer() if container == nil { return false, nil, nil } var secretIDs [][]byte for _, secretRef := range container.Secrets { // Add the null character as a terminator secretIDs = append(secretIDs, []byte(secretRef.SecretID+"\x00")) } return len(secretIDs) != 0, secretIDs, nil }<|fim▁end|>
sa.Target = &api.StoreAction_Task{ Task: v.Task, }
<|file_name|>conv_test.go<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
package conv
<|file_name|>daemon.py<|end_file_name|><|fim▁begin|># -*- encoding: utf8 -*- # A daemon to keep SSH forwarding connected from __future__ import print_function, absolute_import import os<|fim▁hole|>import logging class Daemon(object): def __init__(self): self.heartbeat = 50 def run(self): logging.basicConfig(filename='daemon.log') logging.error('daemon started') self.daemonize() while True: if not self.check_connection(): self.reconnect() logging.warn('reconnecting') time.sleep(self.heartbeat) def check_connection(self): c = socket.socket() try: c.connect(('localhost', 3366)) c.close() return True except socket.error: return False def daemonize(self): pid = os.fork() if pid: os.waitpid(pid, os.WNOHANG) sys.exit(0) return def reconnect(self): pid = os.fork() if pid == 0: # child err = os.execlp('/usr/bin/ssh', 'ssh', '-i', '/home/xu/.ssh/id_rsa', '-L', '3366:127.0.0.1:3306', '-p', '42022', '[email protected]') if err: logging.error("error to execlp") sys.exit(1) elif pid > 0: os.waitpid(pid, 0) else: logging.error('error to fork') sys.exit(2) if __name__ == '__main__': Daemon().run()<|fim▁end|>
import sys import time import socket
<|file_name|>rand_augment.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Random augment.""" import tensorflow as tf from augment import augment_ops # Reference for Imagenet: # https://cs.corp.google.com/piper///depot/google3/learning/brain/research/meta_architect/image/image_processing.py?rcl=275474938&l=2950 IMAGENET_AUG_OPS = [ 'AutoContrast', 'Equalize', 'Invert', 'Rotate', 'Posterize', 'Solarize', 'Color', 'Contrast', 'Brightness', 'Sharpness', 'ShearX', 'ShearY', 'TranslateX', 'TranslateY', 'SolarizeAdd', 'Identity', ] # Levels in this file are assumed to be floats in [0, 1] range # If you need quantization or integer levels, this should be controlled # in client code. MAX_LEVEL = 1. # Constant which is used when computing translation argument from level TRANSLATE_CONST = 100. def _randomly_negate_tensor(tensor): """With 50% prob turn the tensor negative.""" should_flip = tf.cast(tf.floor(tf.random.uniform([]) + 0.5), tf.bool) final_tensor = tf.cond(should_flip, lambda: tensor, lambda: -tensor) return final_tensor def _rotate_level_to_arg(level): level = (level/MAX_LEVEL) * 30. level = _randomly_negate_tensor(level) return (level,) def _enhance_level_to_arg(level): return ((level/MAX_LEVEL) * 1.8 + 0.1,) def _shear_level_to_arg(level): level = (level/MAX_LEVEL) * 0.3 # Flip level to negative with 50% chance level = _randomly_negate_tensor(level) return (level,) def _translate_level_to_arg(level): level = (level/MAX_LEVEL) * TRANSLATE_CONST # Flip level to negative with 50% chance level = _randomly_negate_tensor(level) return (level,) def _posterize_level_to_arg(level): return (int((level/MAX_LEVEL) * 4),) def _solarize_level_to_arg(level): return (int((level/MAX_LEVEL) * 256),) def _solarize_add_level_to_arg(level): return (int((level/MAX_LEVEL) * 110),) def _ignore_level_to_arg(level): del level return () def _divide_level_by_max_level_arg(level): return (level/MAX_LEVEL,) LEVEL_TO_ARG = { 'AutoContrast': _ignore_level_to_arg, 'Equalize': _ignore_level_to_arg, 'Invert': _ignore_level_to_arg, 'Rotate': _rotate_level_to_arg, 'Posterize': _posterize_level_to_arg, 'Solarize': _solarize_level_to_arg, 'SolarizeAdd': _solarize_add_level_to_arg, 'Color': _enhance_level_to_arg, 'Contrast': _enhance_level_to_arg, 'Brightness': _enhance_level_to_arg, 'Sharpness': _enhance_level_to_arg, 'ShearX': _shear_level_to_arg, 'ShearY': _shear_level_to_arg, 'TranslateX': _translate_level_to_arg, 'TranslateY': _translate_level_to_arg, 'Identity': _ignore_level_to_arg, 'Blur': _divide_level_by_max_level_arg, 'Smooth': _divide_level_by_max_level_arg, 'Rescale': _divide_level_by_max_level_arg, } class RandAugment(object): """Random augment with fixed magnitude.""" def __init__(self, num_layers=2, prob_to_apply=None, magnitude=None, num_levels=10): """Initialized rand augment. Args: num_layers: number of augmentation layers, i.e. how many times to do augmentation. prob_to_apply: probability to apply on each layer. If None then always apply. magnitude: default magnitude in range [0, 1], if None then magnitude will be chosen randomly. num_levels: number of levels for quantization of the magnitude. """ self.num_layers = num_layers self.prob_to_apply = ( float(prob_to_apply) if prob_to_apply is not None else None) self.num_levels = int(num_levels) if num_levels else None self.level = float(magnitude) if magnitude is not None else None def _get_level(self): if self.level is not None:<|fim▁hole|> level = tf.random.uniform(shape=[], maxval=self.num_levels + 1, dtype=tf.int32) return tf.cast(level, tf.float32) / self.num_levels def _apply_one_layer(self, image): """Applies one level of augmentation to the image.""" level = self._get_level() branch_fns = [] for augment_op_name in IMAGENET_AUG_OPS: augment_fn = augment_ops.NAME_TO_FUNC[augment_op_name] level_to_args_fn = LEVEL_TO_ARG[augment_op_name] def _branch_fn(image=image, augment_fn=augment_fn, level_to_args_fn=level_to_args_fn): args = [image] + list(level_to_args_fn(level)) return augment_fn(*args) branch_fns.append(_branch_fn) branch_index = tf.random.uniform( shape=[], maxval=len(branch_fns), dtype=tf.int32) aug_image = tf.switch_case(branch_index, branch_fns, default=lambda: image) if self.prob_to_apply is not None: return tf.cond( tf.random.uniform(shape=[], dtype=tf.float32) < self.prob_to_apply, lambda: aug_image, lambda: image) else: return aug_image def __call__(self, image, aug_image_key='image'): output_dict = {} if aug_image_key is not None: aug_image = image for _ in range(self.num_layers): aug_image = self._apply_one_layer(aug_image) output_dict[aug_image_key] = aug_image if aug_image_key != 'image': output_dict['image'] = image return output_dict<|fim▁end|>
return tf.convert_to_tensor(self.level) if self.num_levels is None: return tf.random.uniform(shape=[], dtype=tf.float32) else:
<|file_name|>get_bom_grid.py<|end_file_name|><|fim▁begin|># Fetch BoM gridded files from the Bureau server. import sys from datetime import datetime import calendar import subprocess import os.path R = 'http://www.bom.gov.au/web03/ncc/www/awap' WGET = '/usr/bin/wget' <|fim▁hole|>vars = { 'rain/day' : R+'/rainfall/totals/daily/grid/0.05/history/nat', 'rain_rmse/day' : R+'/rainfall/rmse/daily/grid/0.05/history/nat', 'rain_recal/day' : R+'/rainfall/totals/daily/grid/0.05/history/nat_recal', 'tmax/day' : R+'/temperature/maxave/daily/grid/0.05/history/nat', 'tmax_rmse/day' : R+'/temperature/maxrmse/daily/grid/0.05/history/nat', 'tmin/day' : R+'/temperature/minave/daily/grid/0.05/history/nat', 'tmin_rmse/day' : R+'/temperature/minrmse/daily/grid/0.05/history/nat', 'rad/day' : R+'/solar/solarave/daily/grid/0.05/history/nat', 'vph09/day' : R+'/vprp/vprph09/daily/grid/0.05/history/nat', 'vph15/day' : R+'/vprp/vprph15/daily/grid/0.05/history/nat', 'rain/month' : R+'/rainfall/totals/month/grid/0.05/history/nat', 'rain_rmse/month' : R+'/rainfall/rmse/month/grid/0.05/history/nat', 'tmax/month' : R+'/temperature/maxave/month/grid/0.05/history/nat', 'tmax_rmse/month' : R+'/temperature/maxrmse/month/grid/0.05/history/nat', 'tmin/month' : R+'/temperature/minave/month/grid/0.05/history/nat', 'tmin_rmse/month' : R+'/temperature/minrmse/month/grid/0.05/history/nat', 'rad/month' : R+'/solar/solarave/month/grid/0.05/history/nat', 'vph09/month' : R+'/vprp/vprph09/month/grid/0.05/history/nat', 'vph15/month' : R+'/vprp/vprph15/month/grid/0.05/history/nat' } filePrefix = { 'rain/day' : 'rain_daily', 'rain_rmse/day' : 'rain_rmse_daily', 'rain_recal/day' : 'rain_recal_daily', 'tmax/day' : 'tmax_daily', 'tmax_rmse/day' : 'tmax_rmse_daily', 'tmin/day' : 'tmin_daily', 'tmin_rmse/day' : 'tmin_rmse_daily', 'rad/day' : 'rad_daily', 'vph09/day' : 'vph09_daily', 'vph15/day' : 'vph15_daily', 'rain/month' : 'rain_month', 'rain_rmse/month' : 'rain_rmse_month', 'tmax/month' : 'tmax_month', 'tmax_rmse/month' : 'tmax_rmse_month', 'tmin/month' : 'tmin_month', 'tmin_rmse/month' : 'tmin_rmse_month', 'rad/month' : 'rad_month', 'vph09/month' : 'vph09_month', 'vph15/month' : 'vph15_month', } def get_bom_grid(var, yyyymmdd, month): """ Given a variable VAR and date YYYYMMDD, fetch the .grid file from the Bureau server using wget. If MONTH != 0, monthly files are fetched rather than daily. Return the name of the file fetched. """ monthday = (31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) interval = 'day' if month == 0 else 'month' key = var+'/'+interval if key not in vars: raise ValueError("No such var ("+var+")") try: s = datetime.strptime(yyyymmdd, "%Y%m%d") except ValueError: raise ValueError("Invalid date ("+yyyymmdd+")") if month: if s.month == 2 and calendar.isleap(s.year): monthday[2] = 29 fname = "%04d%02d01%04d%02d%02d.grid.Z" % (s.year, s.month, s.year, s.month, monthday[s.month-1]) else: fname = yyyymmdd+yyyymmdd+".grid.Z" out = filePrefix[key]+'_'+fname if os.path.exists(out): raise RuntimeError("File already exists ("+out+")") url = vars[key]+"/"+fname if subprocess.call([WGET, '--output-document='+out, url]): os.remove(out) raise IOError("Calling %s --output-document=%s %s" % (WGET, out, url)) return out if __name__ == '__main__': if len(sys.argv) != 4: print "USAGE: python get_bom_grid.py VAR YYYYMMDD MONTH(1/0)" sys.exit(1) try: fname = get_bom_grid(sys.argv[1], sys.argv[2], int(sys.argv[3])) print "Fetched file: "+fname except ValueError as e: print "Aborted - erroneous arguments: %s" % e sys.exit(1) except IOError as e: print "URL not found: %s" % e sys.exit(1) except RuntimeError as e: print "File of same name present: %s" % e sys.exit(1) sys.exit(0)<|fim▁end|>
# Append yyyymmddyyyymmdd.grid.Z to these URLs to get the file
<|file_name|>search_result.go<|end_file_name|><|fim▁begin|>package main import ( "fmt" "sort" ) // I haven't included fields I don't think are relevant in these Structs, even though // they might be part of the data GNIP store in the Archive. // This is mostly a Twitter tweet but includes some more information like Klout since that's // an enrichment added to the bot's account. type Result struct { Tweets []Tweet `json:"results"`<|fim▁hole|>type Tweet struct { CreatedAt string `json:"created_at"` TweetId int64 `json:"id"` User User `json:"user"` FilterLevel string `json:"filter_level"` Lang string `json:"lang"` } type User struct { UserId int64 `json:"id"` Name string `json:"name"` ScreenName string `json:"screen_name"` Derived Derived `json:"derived"` Protected bool `json:"protected"` FollowersCt int64 `json:"followers_count"` FriendsCt int64 `json:"friends_count"` Following bool `json:"following"` StatusesCt int64 `json:"statuses_count"` Lang string `json:"lang"` } func (u User) String() string { return fmt.Sprintf("%s: %s [%d] lang:%s", u.Name, u.ScreenName, u.UserId, u.Lang) } type Derived struct { Klout Klout `json:"klout"` } type Klout struct { Score int64 `json:"score"` InfluenceTopics []Topic `json:"influence_topics"` InterestTopics []Topic `json:"interest_topics"` } type Topic struct { Score float32 `json:"score"` Name string `json:"name"` } func (t Topic) String() string { return fmt.Sprintf("%s (%.2f)", t.Name, t.Score) } // sorting stuff so we can sort by users' Klout scores // https://golang.org/pkg/sort/#example_Interface type By func(a, b *User) bool func (by By) Sort(users []User) { ts := &tweeterSorter{ users: users, by: by, } sort.Sort(ts) } type tweeterSorter struct { users []User by func(a, b *User) bool } func (ts *tweeterSorter) Len() int { return len(ts.users) } func (ts *tweeterSorter) Swap(i, j int) { ts.users[i], ts.users[j] = ts.users[j], ts.users[i] } func (ts *tweeterSorter) Less(i, j int) bool { return ts.by(&ts.users[i], &ts.users[j]) }<|fim▁end|>
}
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>module.exports = function(locker) { /* locker.add(function(callback) { //Return content in format: callback({ name: "Vehicle Speed", type: "metric", content: { x: 0, y: 0, xtitle: "Time", ytitle: "Speed"<|fim▁hole|> tags: ["vehicle", "speed", "velocity", "car"] }); }); locker.add(function(callback) { //Return content in format: callback({ name: "Vehicle RPM", type: "metric", content: { x: 0, y: 0, xtitle: "Time", ytitle: "Revolutions Per Minute" }, tags: ["vehicle", "rpm", "revolutions", "car"] }); }); locker.add(function(callback) { callback({ name: "Image test", type: "image", content: { url: "http://24.media.tumblr.com/tumblr_mc6vgcDUEK1qmbg8bo1_500.jpg" }, tags: ["vehicle", "rpm", "revolutions", "car"] }); }); */ }<|fim▁end|>
},
<|file_name|>market_sim.py<|end_file_name|><|fim▁begin|>import datetime as dt import numpy as np import pandas as pd # QSTK Imports import QSTK.qstkutil.DataAccess as da import QSTK.qstkutil.qsdateutil as du def get_orders_list(s_file_path): l_columns = ["year", "month", "day", "sym", "type", "num"] df_orders_list = pd.read_csv(s_file_path, sep=',', header=None) df_orders_list = df_orders_list.dropna(axis=1, how='all') df_orders_list.columns = l_columns return df_orders_list def get_orders(df_orders_list): na_orders_list = df_orders_list.values l_orders = [] ld_daily_orders = None for order in na_orders_list: dt_date = dt.datetime(order[0], order[1], order[2], hour=16) d_order = {df_orders_list.columns[3]: order[3], \ df_orders_list.columns[4]: order[4], \ df_orders_list.columns[5]: int(order[5])} if l_orders != [] and dt_date == l_orders[-1][0]: l_orders[-1][1].append(d_order) else: ld_daily_orders = [] ld_daily_orders.append(d_order) l_orders.append([dt_date, ld_daily_orders]) na_orders = np.array(l_orders) df_orders = pd.DataFrame(na_orders[:, 1], index=na_orders[:, 0], columns=["ord"])<|fim▁hole|> ls_symbols = list(set(df_orders_list["sym"])) ls_symbols.sort() # It is neccesary to sort due the use of set return df_orders, dt_start, dt_end, ls_symbols def get_data(dt_start, dt_end, ls_symbols): ldt_timestamps = du.getNYSEdays(dt_start, dt_end, dt.timedelta(hours=16)) ls_keys = ["open", "high", "low", "close", "volume", "actual_close"] dataobj = da.DataAccess('Yahoo') ldf_data = dataobj.get_data(ldt_timestamps, ls_symbols, ls_keys) d_data = dict(zip(ls_keys, ldf_data)) for s_key in ls_keys: d_data[s_key] = d_data[s_key].fillna(method="ffill") d_data[s_key] = d_data[s_key].fillna(method="bfill") d_data[s_key] = d_data[s_key].fillna(1.0) return d_data def get_prices(dt_start, dt_end, ls_symbols, s_key="close"): # close = adjusted close # actual_close = actual close d_data = get_data(dt_start, dt_end, ls_symbols) return d_data[s_key] def process_daily_orders(dt_date, df_orders, df_prices, df_num, df_val, df_res): op = 0 daily_orders = list(df_orders.ix[dt_date, "ord"]) for order in daily_orders: if order["type"] == "Buy": op = 1 elif order["type"] == "Sell": op = -1 df_num.ix[dt_date, order["sym"]] += op * order["num"] df_res.ix[dt_date, "cash"] += -op * order["num"] * df_prices.ix[dt_date, order["sym"]] def update_port(dt_date, dt_last_orders_date, ls_symbols, df_num, df_res): for s_symbol in ls_symbols: df_num.ix[dt_date, s_symbol] = df_num.ix[dt_last_orders_date, s_symbol] df_res.ix[dt_date, "cash"] = df_res.ix[dt_last_orders_date, "cash"] def value_port(dt_date, ls_symbols, df_prices, df_num, df_val, df_res): for s_symbol in ls_symbols: df_val.ix[dt_date, s_symbol] = df_num.ix[dt_date, s_symbol] * df_prices.ix[dt_date, s_symbol] df_res.ix[dt_date, "port"] = np.sum(df_val.ix[dt_date, :]) df_res.ix[dt_date, "total"] = df_res.ix[dt_date, "port"] + df_res.ix[dt_date, "cash"] def process_orders(df_orders, df_prices, cash): ldt_dates = list(df_prices.index) ls_symbols = list(df_prices.columns) df_num = pd.DataFrame(index=ldt_dates, columns=ls_symbols) df_val = pd.DataFrame(index=ldt_dates, columns=ls_symbols) df_res = pd.DataFrame(index=ldt_dates, columns=["port", "cash", "total"]) df_num = df_num.fillna(0.0) df_val = df_val.fillna(0.0) df_res = df_res.fillna(0.0) df_res.ix[0, "cash"] = cash ldt_orders_dates = list(df_orders.index) iter_orders_dates = iter(ldt_orders_dates) dt_orders_date = iter_orders_dates.next() dt_last_orders_date = dt_orders_date for dt_date in ldt_dates: update_port(dt_date, dt_last_orders_date, ls_symbols, df_num, df_res) if dt_date == dt_orders_date: process_daily_orders(dt_date, df_orders, df_prices, df_num, df_val, df_res) try: dt_last_orders_date = dt_orders_date dt_orders_date = iter_orders_dates.next() except StopIteration: pass value_port(dt_date, ls_symbols, df_prices, df_num, df_val, df_res) df_port = df_num.join(df_val, lsuffix="_num", rsuffix="_val").join(df_res) #df_port.to_csv("port.csv") return df_port def save_values(df_port, s_out_file_path): ldt_dates = df_port.index na_dates = np.array([[dt_date.year, dt_date.month, dt_date.day] for dt_date in ldt_dates]) na_total = np.array(df_port["total"]) na_values = np.insert(arr=na_dates, obj=3, values=na_total, axis=1) df_values = pd.DataFrame(na_values, columns=["year", "month", "day", "total"]) df_values.to_csv(s_out_file_path, sep=",", header=False, index=False) if __name__ == '__main__': print "start market_sim.py" s_in_file_path = "data\\q1_orders.csv" s_out_file_path = "data\\q1_values.csv" s_cash = "100000" f_cash = float(s_cash) df_orders_list = get_orders_list(s_in_file_path) df_orders, dt_start, dt_end, ls_symbols = get_orders(df_orders_list) df_prices = get_prices(dt_start, dt_end, ls_symbols) df_port = process_orders(df_orders, df_prices, f_cash) save_values(df_port, s_out_file_path) print "end market_sim.py"<|fim▁end|>
df_orders = df_orders.sort() dt_start = df_orders.ix[0].name dt_end = df_orders.ix[-1].name
<|file_name|>ember-cli-build.js<|end_file_name|><|fim▁begin|>/* eslint-env node */ 'use strict'; const EmberAddon = require('ember-cli/lib/broccoli/ember-addon'); module.exports = function(defaults) { let app = new EmberAddon(defaults, { 'ember-cli-babel': { includePolyfill: true }<|fim▁hole|> app.import('node_modules/highlightjs/styles/monokai-sublime.css'); /* This build file specifies the options for the dummy test app of this addon, located in `/tests/dummy` This build file does *not* influence how the addon or the app using it behave. You most likely want to be modifying `./index.js` or app's build file */ return app.toTree(); };<|fim▁end|>
});
<|file_name|>storedPixelDataToCanvasImageDataRGBA.js<|end_file_name|><|fim▁begin|>import now from './now.js'; /**<|fim▁hole|> * @param {Image} image A Cornerstone Image Object * @param {Array} lut Lookup table array * @param {Uint8ClampedArray} canvasImageDataData canvasImageData.data buffer filled with white pixels * * @returns {void} */ export default function (image, lut, canvasImageDataData) { let start = now(); const pixelData = image.getPixelData(); image.stats.lastGetPixelDataTime = now() - start; const numPixels = pixelData.length; const minPixelValue = image.minPixelValue; let canvasImageDataIndex = 0; let storedPixelDataIndex = 0; let pixelValue; // NOTE: As of Nov 2014, most javascript engines have lower performance when indexing negative indexes. // We have a special code path for this case that improves performance. Thanks to @jpambrun for this enhancement // Added two paths (Int16Array, Uint16Array) to avoid polymorphic deoptimization in chrome. start = now(); if (pixelData instanceof Int16Array) { if (minPixelValue < 0) { while (storedPixelDataIndex < numPixels) { pixelValue = lut[pixelData[storedPixelDataIndex++] + (-minPixelValue)]; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = 255; // Alpha } } else { while (storedPixelDataIndex < numPixels) { pixelValue = lut[pixelData[storedPixelDataIndex++]]; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = 255; // Alpha } } } else if (pixelData instanceof Uint16Array) { while (storedPixelDataIndex < numPixels) { pixelValue = lut[pixelData[storedPixelDataIndex++]]; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = 255; // Alpha } } else if (minPixelValue < 0) { while (storedPixelDataIndex < numPixels) { pixelValue = lut[pixelData[storedPixelDataIndex++] + (-minPixelValue)]; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = 255; // Alpha } } else { while (storedPixelDataIndex < numPixels) { pixelValue = lut[pixelData[storedPixelDataIndex++]]; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = pixelValue; canvasImageDataData[canvasImageDataIndex++] = 255; // Alpha } } image.stats.lastStoredPixelDataToCanvasImageDataTime = now() - start; }<|fim▁end|>
* This function transforms stored pixel values into a canvas image data buffer * by using a LUT. *
<|file_name|>OAuthRequest.java<|end_file_name|><|fim▁begin|>package org.jinstagram.auth.model; import org.jinstagram.http.Request; import org.jinstagram.http.Verbs; import java.util.HashMap; import java.util.Map; /** * The representation of an OAuth HttpRequest. * * Adds OAuth-related functionality to the {@link Request} */ public class OAuthRequest extends Request { private static final String OAUTH_PREFIX = "oauth_"; private Map<String, String> oauthParameters; /** * Default constructor. * * @param verb Http verb/method * @param url resource URL */ public OAuthRequest(Verbs verb, String url) { super(verb, url); this.oauthParameters = new HashMap<String, String>(); } /** * Adds an OAuth parameter. * * @param key name of the parameter * @param value value of the parameter * * @throws IllegalArgumentException if the parameter is not an OAuth * parameter<|fim▁hole|> } private static String checkKey(String key) { if (key.startsWith(OAUTH_PREFIX) || key.equals(OAuthConstants.SCOPE)) { return key; } else { throw new IllegalArgumentException(String.format( "OAuth parameters must either be '%s' or start with '%s'", OAuthConstants.SCOPE, OAUTH_PREFIX)); } } /** * Returns the {@link Map} containing the key-value pair of parameters. * * @return parameters as map */ public Map<String, String> getOauthParameters() { return oauthParameters; } @Override public String toString() { return String.format("@OAuthRequest(%s, %s)", getVerb(), getUrl()); } }<|fim▁end|>
*/ public void addOAuthParameter(String key, String value) { oauthParameters.put(checkKey(key), value);
<|file_name|>app.routes.ts<|end_file_name|><|fim▁begin|>// ====== ./app/app.routes.ts ====== // Imports import { ModuleWithProviders } from '@angular/core'; import { Routes, RouterModule } from '@angular/router'; import { ActivationComponent } from './user/activation/activation.component'; import { AdminComponent } from './admin/admin.component'; import { AppComponent } from './app.component'; import { RootComponent } from './root/root.component'; import { DashboardComponent } from './root/dashboard.component'; import { RegistrationComponent } from './user/registration/registration.component'; import { ForgotPasswordComponent } from './user/forgotPassword/forgotPassword.component'; import { ResetPasswordComponent } from './user/resetPassword/resetPassword.component';<|fim▁hole|>import { ClassListComponent } from './class/list/list.component'; import { ClassDetailComponent } from './class/detail/detail.component'; import { CreateClassComponent } from './class/create/create.component'; import { ClassBrowseComponent } from './class/browse/browse.component'; import { DisplayProblemComponent } from './problem/display/displayProblem.component'; // Route Guards import { AuthRouteGuard } from './utils/AuthRouteGuard.service'; import { TeacherRouteGuard } from './utils/TeacherRouteGuard.service'; import { AdminRouteGuard } from './utils/AdminRouteGuard.service'; // Route Configuration export const routes: Routes = [ { path: '', component: RootComponent }, // Application root { path: 'sign-in', component: SigninComponent }, { path: 'register', component: RegistrationComponent }, { path: 'forgot-password', component: ForgotPasswordComponent }, { path: 'reset-password/:id', component: ResetPasswordComponent }, { path: 'activation/:id', component: ActivationComponent}, { path: 'dashboard', component: DashboardComponent, canActivate: [AuthRouteGuard] }, { path: 'class/:id', component: ClassDetailComponent, canActivate: [AuthRouteGuard] }, { path: 'class', component: ClassBrowseComponent, canActivate: [AuthRouteGuard] }, { path: 'admin/:control', component: AdminComponent, canActivate: [AuthRouteGuard, AdminRouteGuard]}, { path: 'admin', component: AdminComponent, canActivate: [AuthRouteGuard, AdminRouteGuard]}, { path: 'create/class', component: CreateClassComponent, canActivate: [AuthRouteGuard, TeacherRouteGuard] }, { path: 'problem/display/:classCode/:problemCode', component: DisplayProblemComponent, canActivate: [AuthRouteGuard] }, // The PageNotFound route MUST be last in this list { path: '**', component: PageNotFoundComponent } // Page Not Found ]; export const routing: ModuleWithProviders = RouterModule.forRoot(routes);<|fim▁end|>
import { SigninComponent } from './user/signin/signin.component'; import { PageNotFoundComponent } from './pageNotFound/pageNotFound.component';
<|file_name|>sender_test.go<|end_file_name|><|fim▁begin|>package channels<|fim▁hole|> import "testing" import "time" func TestSender(t *testing.T) { type args struct { ch chan string done chan bool } tests := []struct { name string args args }{ {"base-case", args{make(chan string, 10), make(chan bool)}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { go Sender(tt.args.ch, tt.args.done) time.Sleep(100 * time.Millisecond) tt.args.done <- true }) } }<|fim▁end|>
<|file_name|>qa_config.py<|end_file_name|><|fim▁begin|># # # Copyright (C) 2007, 2011 Google Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """QA configuration. """ from ganeti import utils from ganeti import serializer from ganeti import compat import qa_error cfg = None options = None def Load(path): """Loads the passed configuration file. """ global cfg # pylint: disable-msg=W0603 cfg = serializer.LoadJson(utils.ReadFile(path)) Validate()<|fim▁hole|> def Validate(): if len(cfg['nodes']) < 1: raise qa_error.Error("Need at least one node") if len(cfg['instances']) < 1: raise qa_error.Error("Need at least one instance") if len(cfg["disk"]) != len(cfg["disk-growth"]): raise qa_error.Error("Config options 'disk' and 'disk-growth' must have" " the same number of items") def get(name, default=None): return cfg.get(name, default) def TestEnabled(tests): """Returns True if the given tests are enabled. @param tests: a single test, or a list of tests to check """ if isinstance(tests, basestring): tests = [tests] return compat.all(cfg.get("tests", {}).get(t, True) for t in tests) def GetMasterNode(): return cfg['nodes'][0] def AcquireInstance(): """Returns an instance which isn't in use. """ # Filter out unwanted instances tmp_flt = lambda inst: not inst.get('_used', False) instances = filter(tmp_flt, cfg['instances']) del tmp_flt if len(instances) == 0: raise qa_error.OutOfInstancesError("No instances left") inst = instances[0] inst['_used'] = True return inst def ReleaseInstance(inst): inst['_used'] = False def AcquireNode(exclude=None): """Returns the least used node. """ master = GetMasterNode() # Filter out unwanted nodes # TODO: Maybe combine filters if exclude is None: nodes = cfg['nodes'][:] elif isinstance(exclude, (list, tuple)): nodes = filter(lambda node: node not in exclude, cfg['nodes']) else: nodes = filter(lambda node: node != exclude, cfg['nodes']) tmp_flt = lambda node: node.get('_added', False) or node == master nodes = filter(tmp_flt, nodes) del tmp_flt if len(nodes) == 0: raise qa_error.OutOfNodesError("No nodes left") # Get node with least number of uses def compare(a, b): result = cmp(a.get('_count', 0), b.get('_count', 0)) if result == 0: result = cmp(a['primary'], b['primary']) return result nodes.sort(cmp=compare) node = nodes[0] node['_count'] = node.get('_count', 0) + 1 return node def ReleaseNode(node): node['_count'] = node.get('_count', 0) - 1<|fim▁end|>
<|file_name|>flexuin_fill2.rs<|end_file_name|><|fim▁begin|>grid_pattern = { {0, 0.028, "C", "floatiss_"}, {0.024, 0.040, "B", "floatiss_"}, {0.024, 0.028, "C", "floatiss_"}, {0.024, 0.028, "B", "floatiss_"}, {0.024, 0.028, "C", "floatiss_"}, {0.024, 0.028, "B", "floatiss_"}, {0.024, 0.028, "C", "floatiss_"}, {0.024, 0.074, "B", "floatiss_"}, {0.024, 0.028, "C", "floatiss_"}, {0.024, 0.028, "B", "floatiss_"}, {0.024, 0.028, "C", "floatiss_"}, {0.024, 0.028, "B", "floatiss_"}, {0.024, 0.028, "C", "floatiss_"}, {0.024, 0.040, "B", "floatiss_"}, }; offset = -0.014; period = 0.798; //buckets for overlong fill trimming attempts in cut moving multicut_cut_moving = true; multicut_threshold = 1.0; overlong_fill_thresh_buckets = {0.028, 0.28 }; //write_out_orig = false;; //do_ungridded_fill = false; process_name = "1273.3"; fill_length = 30; dfmg_extensions = true; //hybrid = false; use_drawn_kor = true; top_level_only_kor = false; use_route_kor = false; use_fill_kor = true; <|fim▁hole|> use_global_kor = true; drawn_kor_x_space = 0.028; drawn_kor_y_space = 0.024; keepout_cells_list = {""}; keepout_cells_x_space = 0.028; keepout_cells_y_space = 0.024; //no_extend_cells_list = {"cx*",}; //near_fill_density_target = 1.0; //far_fill_density_target = 1.0; density_window_size = 2.1; path = "./"; half_dr_end_to_end = 0.028; //write_milkyway_output = false; //mw_cellname = ""; //mw_library = ""; //mw_path = ""; //mw_view = "FILL"; //mw_append = true; //region_grids = {}; // use_fill_markers = true; consider_off_grid = false; write_out_c_tracks = true; write_out_c_wires = false; //break_pattern = {}; //break_offset = 0.0; //break_period = 0.0; //write_out_b_tracks = false; //transition_pattern = {}; //transition_offset = 0.0; //transition_period = 0.0;<|fim▁end|>
<|file_name|>azure-arm-subscription.ts<|end_file_name|><|fim▁begin|>import tl = require('azure-pipelines-task-lib/task'); import msRestAzure = require('./azure-arm-common'); import azureServiceClientBase = require('./AzureServiceClientBase'); import depolymentsBase = require('./DeploymentsBase'); export class SubscriptionManagementClient extends azureServiceClientBase.AzureServiceClientBase { public subscriptionId: string; constructor(credentials: msRestAzure.ApplicationTokenCredentials, subscriptionId: string, options?: any) { super(credentials); this.validateInputs(subscriptionId); this.apiVersion = '2019-05-10'; this.acceptLanguage = 'en-US';<|fim▁hole|> this.generateClientRequestId = true; if (!!options && !!options.longRunningOperationRetryTimeout) { this.longRunningOperationRetryTimeout = options.longRunningOperationRetryTimeout; } this.deployments = new SubscriptionDeployments(this); this.subscriptionId = subscriptionId; } public getRequestUri(uriFormat: string, parameters: {}, queryParameters?: string[], apiVersion?: string): string { parameters['{subscriptionId}'] = encodeURIComponent(this.subscriptionId); return super.getRequestUriForBaseUri(this.baseUri, uriFormat, parameters, queryParameters, apiVersion); } private validateInputs(subscriptionId: string) { if (!subscriptionId) { throw new Error(tl.loc("SubscriptionIdCannotBeNull")); } } } export class SubscriptionDeployments extends depolymentsBase.DeploymentsBase { protected client: SubscriptionManagementClient; constructor(client: SubscriptionManagementClient) { super(client); this.client = client; } public createOrUpdate(deploymentParameters, parameters, callback) { // Create HTTP request uri var uri = this.client.getRequestUri( '//subscriptions/{subscriptionId}/providers/Microsoft.Resources/deployments/{deploymentName}', { '{deploymentName}': deploymentParameters } ); super.deployTemplate(uri, deploymentParameters, parameters, callback); } public validate(deploymentParameters, parameters, callback) { // Create HTTP request uri var uri = this.client.getRequestUri( '//subscriptions/{subscriptionId}/providers/Microsoft.Resources/deployments/{deploymentName}/validate', { '{deploymentName}': deploymentParameters } ); super.validateTemplate(uri, deploymentParameters, parameters, callback); } }<|fim▁end|>
<|file_name|>secret.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. use std::fmt; use std::ops::Deref; use std::str::FromStr; use secp256k1::key; use bigint::hash::H256; use {Error}; #[derive(Clone, PartialEq, Eq)] pub struct Secret {<|fim▁hole|> fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { write!(fmt, "Secret: 0x{:x}{:x}..{:x}{:x}", self.inner[0], self.inner[1], self.inner[30], self.inner[31]) } } impl Secret { fn from_slice_unchecked(key: &[u8]) -> Self { assert_eq!(32, key.len(), "Caller should provide 32-byte length slice"); let mut h = H256::default(); h.copy_from_slice(&key[0..32]); Secret { inner: h } } pub fn from_slice(key: &[u8]) -> Result<Self, Error> { let secret = key::SecretKey::from_slice(&super::SECP256K1, key)?; Ok(secret.into()) } } impl FromStr for Secret { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let hash = H256::from_str(s).map_err(|e| Error::Custom(format!("{:?}", e)))?; Self::from_slice(&hash) } } impl From<key::SecretKey> for Secret { fn from(key: key::SecretKey) -> Self { Self::from_slice_unchecked(&key[0..32]) } } impl Deref for Secret { type Target = H256; fn deref(&self) -> &Self::Target { &self.inner } }<|fim▁end|>
inner: H256, } impl fmt::Debug for Secret {
<|file_name|>leaflet.timedimension.player.js<|end_file_name|><|fim▁begin|>/*jshint indent: 4, browser:true*/ /*global L*/ /* * L.TimeDimension.Player */ //'use strict'; L.TimeDimension.Player = (L.Layer || L.Class).extend({ includes: (L.Evented || L.Mixin.Events), initialize: function(options, timeDimension) { L.setOptions(this, options); this._timeDimension = timeDimension; this._paused = false; this._buffer = this.options.buffer || 5; this._minBufferReady = this.options.minBufferReady || 1; this._waitingForBuffer = false; this._loop = this.options.loop || false; this._steps = 1; this._timeDimension.on('timeload', (function(data) { this.release(); // free clock this._waitingForBuffer = false; // reset buffer }).bind(this)); this.setTransitionTime(this.options.transitionTime || 1000); this._timeDimension.on('limitschanged availabletimeschanged timeload', (function(data) { this._timeDimension.prepareNextTimes(this._steps, this._minBufferReady, this._loop); }).bind(this)); }, _tick: function() { var maxIndex = this._getMaxIndex(); var maxForward = (this._timeDimension.getCurrentTimeIndex() >= maxIndex) && (this._steps > 0); var maxBackward = (this._timeDimension.getCurrentTimeIndex() == 0) && (this._steps < 0); if (maxForward || maxBackward) { // we reached the last step if (!this._loop) { this.pause(); this.stop(); this.fire('animationfinished'); return; } } if (this._paused) { return; } var numberNextTimesReady = 0, buffer = this._bufferSize; if (this._minBufferReady > 0) { numberNextTimesReady = this._timeDimension.getNumberNextTimesReady(this._steps, buffer, this._loop); // If the player was waiting, check if all times are loaded if (this._waitingForBuffer) { if (numberNextTimesReady < buffer) { console.log('Waiting until buffer is loaded. ' + numberNextTimesReady + ' of ' + buffer + ' loaded'); this.fire('waiting', { buffer: buffer, available: numberNextTimesReady }); return; } else { // all times loaded<|fim▁hole|> } } else { // check if player has to stop to wait and force to full all the buffer if (numberNextTimesReady < this._minBufferReady) { console.log('Force wait for load buffer. ' + numberNextTimesReady + ' of ' + buffer + ' loaded'); this._waitingForBuffer = true; this._timeDimension.prepareNextTimes(this._steps, buffer, this._loop); this.fire('waiting', { buffer: buffer, available: numberNextTimesReady }); return; } } } this.pause(); this._timeDimension.nextTime(this._steps, this._loop); if (buffer > 0) { this._timeDimension.prepareNextTimes(this._steps, buffer, this._loop); } }, _getMaxIndex: function(){ return Math.min(this._timeDimension.getAvailableTimes().length - 1, this._timeDimension.getUpperLimitIndex() || Infinity); }, start: function(numSteps) { if (this._intervalID) return; this._steps = numSteps || 1; this._waitingForBuffer = false; var startedOver = false; if (this.options.startOver){ if (this._timeDimension.getCurrentTimeIndex() === this._getMaxIndex()){ this._timeDimension.setCurrentTimeIndex(this._timeDimension.getLowerLimitIndex() || 0); startedOver = true; } } this.release(); this._intervalID = window.setInterval( L.bind(this._tick, this), this._transitionTime); if (!startedOver) this._tick(); this.fire('play'); this.fire('running'); }, stop: function() { if (!this._intervalID) return; clearInterval(this._intervalID); this._intervalID = null; this._waitingForBuffer = false; this.fire('stop'); }, pause: function() { this._paused = true; }, release: function () { this._paused = false; }, getTransitionTime: function() { return this._transitionTime; }, isPlaying: function() { return this._intervalID ? true : false; }, isWaiting: function() { return this._waitingForBuffer; }, isLooped: function() { return this._loop; }, setLooped: function(looped) { this._loop = looped; this.fire('loopchange', { loop: looped }); }, setTransitionTime: function(transitionTime) { this._transitionTime = transitionTime; if (typeof this._buffer === 'function') { this._bufferSize = this._buffer.call(this, this._transitionTime, this._minBufferReady, this._loop); console.log('Buffer size changed to ' + this._bufferSize); } else { this._bufferSize = this._buffer; } if (this._intervalID) { this.stop(); this.start(this._steps); } this.fire('speedchange', { transitionTime: transitionTime, buffer: this._bufferSize }); }, getSteps: function() { return this._steps; } });<|fim▁end|>
console.log('Buffer is fully loaded!'); this.fire('running'); this._waitingForBuffer = false;
<|file_name|>users.js<|end_file_name|><|fim▁begin|>var express = require('express'); var user = require('../model/user'); var jsonReturn = require('../common/jsonReturn'); var router = express.Router(); /* GET users listing. */ router.get('/', function(req, res, next) { user.getUsers(function(err, rows, fields){ res.render('users', { users : rows } ); }) }); router.post('/getUsers', function (req, res, next) { if(req.body.username){ user.getUsers(function(err, rows, fields){<|fim▁hole|>}); module.exports = router;<|fim▁end|>
res.json(jsonReturn({ users : rows } )); }) }
<|file_name|>HF_Sim_Book.py<|end_file_name|><|fim▁begin|>''' Created on Sep 15, 2010 @author: duncantait ''' from SimPy.Simulation import * import numpy as np import random import math class G(): #Settings for HF Stations num_channels = 18 num_stations = 10 class Network(): stations = [] class Medium(): def __init__(self): self.channels = [] for i in range(G.num_channels): S = Store(name=i,capacity=1) self.channels.append(S) class StationContainer(): def __init__(self,ID): self.ID = ID self.Operator = Operator(ID) self.StationSettings = StationSettings(ID) self.Scanning = Scanning(ID) self.Tx = Tx(ID) def initComponents(self): self.Operator.initCounterparts() self.StationSettings.initCounterparts() self.Scanning.initCounterparts() self.Tx.initCounterparts() def activate(self): activate(self.Operator,self.Operator.sendMessage(),at=0.0) activate(self.StationSettings,self.StationSettings.sounding(),at=0.0) activate(self.Scanning,self.Scanning.scan(),at=0.0) activate(self.Tx,self.Tx.sending(),at=0.0) class Operator(Process): def __init__(self, ID): Process.__init__(self) self.ID = ID def initComponents(self): self.StationSettings = [N.StationSettings for N in Network.stations if N.ID==self.ID][0] def sendMessage(self): while True: #every so often operator wants to send a message: adds to queue. yield hold, self, random.uniform(0,1200) #Create a Message of type 'CALL' frameInfo = frameDetails(self.ID,self.decideDestination(),0,fType.CALL,False,-1,-1) frameInfo.channels = self.ChannelOrder(frameInfo.destination) yield put,self,self.Tx.sendQ,[frameInfo] yield hold, self, random.uniform(0,1200) def decideDestination(self): while True: dest = random.randint(0,G.num_channels-1) if dest != self.ID: return dest def ChannelOrder(self,channel=-1,station=-1): #sorts best channels best-worst if channel==-1: ordered = self.StationSettings.channelBER[station,:].argsort() return ordered[::-1] #reverse order of array if station==-1: ordered = self.StationSettings.channelBER[:,channel].argsort() return ordered[::-1] class StationSettings(Process): def __init__(self, ID): Process.__init__(self) self.ID = ID self.state = sState.SCANNING #can be scanning, linking or linked. self.sending = False self.channelBER = np.zeros((G.num_channels,G.num_stations)) #LQA: Link Quality Analysis self.timeout = 2 #current timeout counter for linking/linked mode, if this hits zero, go back to scanning self.Td = 2 #dwell time per channel self.Twce = 2 #wait for calling cycle to end self.Twr = 2 self.minLQA = 0.2 self.bitrate = 392 self.hardwareTime = 20 #e.g. power up/down time, modulation/demodulation, encoding/decoding, crypto in ms. #tune up/down time. Included in Twrt (wait for response and tune time) def Sounding(self): while True: yield hold, self, random.uniform(0,120) #Sound yield hold, self, 1800 class Scanning(Process): #Is HF ALWAYS scanning? No, either scanning, linking or linked def __init__(self, ID): self.ID = ID Process.__init__(self) self.currentChannel = 0 def initComponents(self): self.StationSettings = [N.StationSettings for N in Network.stations if N.ID==self.ID][0] self.Tx = [N.Tx for N in Network.stations if N.ID==self.ID][0] def scan(self): while True: #Different responses depending on mode. #Rules: cannot receive while sending <----------------- #Otherwise, packets will be interpreted as to the mode the station is in. channel = Medium.channels[self.currentChannel] yield (get,self,channel,1),(hold,self,self.StationSettings.timeout) if self.acquired(channel): signal = self.got yield put, self , channel, signal frameInfo = self.decode(signal) #This implies picking up the signal frame by frame from the channel if (frameInfo.LQA > self.StationSettings.minLQA) and (frameInfo.destination==self.ID): yield (put,self,channel,['PH:'+str(self.ID)]),(hold,self,self.StationSettings.Twce) if self.stored(channel): yield get,self,channel,1 #Yank sniffer packet back off channel. if frameInfo.type== fType.CALL: if self.StationSettings.state==sState.SCANNING: yield put,self,self.Tx.sendQ,[frameInfo] self.StationSettings.state=sState.LINKING yield waitevent,self,self.Tx.sE if frameInfo.type== fType.RESPONSE: if self.StationSettings.state==sState.LINKING: yield put,self,self.Tx.sendQ,[frameInfo] yield waitevent,self,self.Tx.sE if frameInfo.type== fType.ACK: if self.StationSettings.state==sState.LINKING: yield put,self,self.Tx.sendQ,[frameInfo] self.StationSettings.state=sState.LINKED yield waitevent,self,self.Tx.sE if frameInfo.type== fType.QUICK_ID: if (self.StationSettings.state==sState.SCANNING or sState.LINKED) and (frameInfo.terminate==False): 'I dont think you can have a QUICK ID out of the blue, and it doesnt need a reply...' #yield put,self,self.Tx.sendQ,[frameInfo] #yield waitevent,self,self.Tx.sE elif frameInfo.terminate==True: self.StationSettings.state=sState.SCANNING if frameInfo.type== fType.MSG: if self.StationSettings.state== sState.LINKED and frameInfo.terminate==False: 'again, why the reply? just keep channel open...' elif frameInfo.terminate==True: self.StationSettings.state=sState.SCANNING #yield put,self,self.Tx.sendQ,[frameInfo] #yield waitevent,self,self.Tx.sE else: print 'Invalid Packet' self.StationSettings.state=sState.SCANNING else: print 'Timed out' self.StationSettings.state=sState.SCANNING else: 'Frame unsuitable: Continue Scan' self.StationSettings.state=sState.SCANNING else: 'Channel Empty: Continue Scan' self.StationSettings.state=sState.SCANNING if self.StationSettings.state==sState.SCANNING: if self.currentChannel==G.num_channels-1: self.currentChannel = 0 else: self.currentChannel+=1 def decode(self,frameInfo): #Return a packet useable to send straightaway. All data is known to achieve this. returnInfo = self.convertReply(frameInfo)<|fim▁hole|> returnInfo = self.calculate_LQA(returnInfo) returnInfo.channels = self.currentChannel #Messages and Acks/Responses always have to be on the same channel as before... which is all #That is dealt with in 'Scanning' returnInfo.terminate = False #This needs to be somewhat randomised, but for now this will do. return returnInfo #If LQA below certain amount, reject in PEM above def convertReply(self, frameInfo): #Convert incoming packet into it's appropriate output type. returnInfo = frameInfo if frameInfo.type==fType.OUT: returnInfo.type= fType.CALL if frameInfo.type==fType.CALL: returnInfo.origin = frameInfo.destination returnInfo.destination = frameInfo.origin returnInfo.type = fType.RESPONSE elif frameInfo.type == fType.RESPONSE: returnInfo.type = fType.ACK returnInfo.origin = frameInfo.destination returnInfo.destination = frameInfo.origin elif frameInfo.type == fType.ACK: returnInfo.type = fType.MSG returnInfo.origin = frameInfo.destination returnInfo.destination = frameInfo.origin returnInfo = self.decidePayload(returnInfo) #Messages get a payload. return returnInfo def responseSize(self,frameInfo): returnInfo = frameInfo destination = self.get_address(frameInfo.destination) origin = self.get_address(frameInfo.origin) if returnInfo.type == fType.RESPONSE or fType.ACK: returnInfo.size += len(destination)*2*49 + len(origin)*49 #each word is 49bits after encoding return returnInfo def decidePayload(self, frameInfo): #Data Block Mode: Basic mode 0-572 bits, Extended 572-262820 bits (+18 each for cyclic redundancy check), #Extended data blocks are 588 bits (49*12 + 16 FCS) Basic are 49 bits. note 572 bits = 81 ASCII chars. #Other modes are AMD (auto msg display) and DTM (data text msg), but less efficient for larger data #Upper bound performance = 375 * (588/1176) = 187.5bps #Also, many many CMD words that do many things. (Important one being LQA transfer) #See pages around 231, need to add CMD and extra necessary words to these data_blocks etc. returnInfo = frameInfo mode = random.randint(0,10) if mode==0 or mode==1: #basic data block mode returnInfo.size += random.randint(1,81)*7 + 16 elif mode==2: #extended data block mode (least likely) returnInfo.size += random.randint(82,37260)*7 + 16 elif mode==3 or mode==4 or mode==5 or mode==6: #CMD message returnInfo.size += 24 #1 extra word elif mode==7 or mode==8 or mode==9 or mode==10: returnInfo.size += 0 #null return returnInfo def get_address(self, address): words = [] div = math.floor(len(address)/3) rem = len(address)%3 i = 0 rep = True for word in range(div): words.append(address[i:i+3]) if rep==False and i >= 3: words.append('DATA') else: words.append('REP') rep = not rep i += 3 if rem>0: final_word = address[i:i+rem] + '@'*(3-rem) words.append(final_word) return words # #Instead of 'crafting messages' and spending ages about it. Merely use the functions written already #(for making the words etc.) to calculate the SIZE of the signal, and make this a parameter of the #frameInfo that sits on the channel. This can then be used to say HOW LONG it stays on the channel, and #how long the receiver must receive for (although this doesn't matter too much as the receiver is effectively #locked in one state once it enters linking/linked mode (line 101). This solves Response/Ack problem too # class Tx(Process): def __init__(self,ID): self.ID = ID Process.__init__(self) self.sendQ = Store(name=ID,capacity='unbounded') self.sE = SimEvent(name='TxSent') def initComponents(self): self.StationSettings = [N.StationSettings for N in Network.stations if N.ID==self.ID][0] def sending(self): while True: yield get,self,self.sendQ,1 frameInfo = self.got[0] #data in form frameDetails() signal_time = frameInfo.size*self.StationSettings.bitrate + self.StationSettings.hardwareTime frameInfo.LQA = self.calculate_LQA(frameInfo.destination) unSent = True for chanNum in frameInfo.channels: if unSent: channel = Medium.channels(chanNum) if channel.nrBuffered==0: print 'Channel', chanNum, 'free, occupying..' yield put,self,channel,[frameInfo] unSent = False if self.type == fType.CALL: #call cycle yield hold,self,2*self.StationSettings.Td*G.num_stations #THIS NEEDS ATTENTION AS IT IS DIFFERENT FROM THE REST - This could actually be ok... just needs some additions for propagation time #could use 'signal_time' from 'size' but kind of backwards... if self.interrupted(): print 'Collision occurred, station:', self.ID else: yield hold,self,signal_time #How long does it take to get there?! if self.interrupted(): print 'Collision occurred, station:', self.ID yield get,self,channel,1 #Message delivered. #UNPASSIVATE SCANNING PEM self.sE.signal(frameInfo) self.StationSettings.timeout = self.StationSettings.Twr #INVESTIGATE THIS TIMEOUT VARIABLE, WHAT DOES IT ACTUALLY DO? SEEM TO REMEMBER IT BEING A GOOD IDEA. def calculate_LQA(self, destination): #This algorithm has potential to be highly detailed #Parameters needed: positions of 2 stations --> distance #Ionospheric conditions #Time of day, sunspot cycle. #For now, stations closer in numbers are better connected. #This should be in Rx as it needs to eventually interface with an Environment process distance = abs(self.ID - destination)/G.num_stations LQA = random.normalvariate(100-(distance*100),4) if LQA > 1: LQA=1 if LQA < 0: LQA=0 ##CATER FOR IF OUTGOING FRAME FAILS AND NEEDS TO REPEAT USING A DIFFERENT CHANNEL! (extra parameter?) #class OutgoingFrame(Process): # def __init__(self,ID,frameInfo,frame): # #channels is a list of channels, for a response or single channel call, it will only contain 1 entry # Process.__init__(self) # self.ID = ID # self.destination = frameInfo.destination # self.channelOrder = frameInfo.channels # self.type = frameInfo.type # self.frame = frame # def initComponents(self): # self.StationSettings = [N.StationSettings for N in Network.stations if N.ID==self.ID][0] # self.Tx = [N.Tx for N in Network.stations if N.ID==self.ID][0] # def go(self): # unSent = True # for chanNum in self.channelOrder: # if unSent: # channel = Medium.channels(chanNum) # if channel.nrBuffered==0: # print 'Channel', chanNum, 'free, occupying..' # yield put,self,channel,[self.frame] # unSent = False # if self.type == fType.OUT: #call cycle # yield hold,self,2*self.StationSettings.Td*G.num_stations # if self.interrupted(): # print 'Collision occurred, station:', self.ID # if self.type == fType.RESPONSE: # yield hold,self,self.StationSettings.Twr #How long does it take to get there?! # if self.interrupted(): # print 'Collision occurred, station:', self.ID # yield get,self,channel,1 #Message delivered. # #UNPASSIVATE SCANNING PEM # self.StationSettings.timeout = self.StationSettings.Twr class frameDetails(): def __init__(self,origin,destination,size,type,terminate,channels,LQA): self.origin = origin self.destination = destination self.size = size self.type = type self.terminate = terminate self.channels = channels self.LQA = LQA class fType(): MSG = 1 QUICK_ID = 2 CALL = 3 RESPONSE = 4 ACK = 5 OUT = 6 class sState(): SCANNING = 1 LINKING = 2 LINKED = 3 initialize() Medium = Medium() Network.stations = [StationContainer(i) for i in range(G.num_stations)] for N in Network.stations: N.initComponents() N.activate() simulate(until=G.max_time)<|fim▁end|>
returnInfo = self.responseSize(returnInfo)
<|file_name|>CaseFactorySpec.ts<|end_file_name|><|fim▁begin|>import CaseFactory from 'domains/CaseFactory' import Case from 'domains/Case' import * as assert from 'power-assert' describe("ThenFactory", function() { describe("create", () => { before(() => { this.newCase = CaseFactory.create<number,number>({ when: n => n > 0, then: v => v + 1, }) }) it("should be return new instance", () => { assert.ok(this.newCase instanceof Case) }) it("should be test correctly", () => { assert.equal(this.newCase.when.test(10), true) assert.equal(this.newCase.when.test(-1), false) }) it("should be exec correctly", () => { assert.equal(this.newCase.then.exec(1), 2) })<|fim▁hole|> before(() => { this.newCase = CaseFactory.createNone<number,number>({ then: v => v + 1, }) }) it("should be return new instance", () => { assert.ok(this.newCase instanceof Case) }) it("should be test correctly", () => { assert.equal(this.newCase.when.test(undefined), true) assert.equal(this.newCase.when.test(null), true) assert.equal(this.newCase.when.test(0), false) }) it("should be exec correctly", () => { assert.equal(this.newCase.then.exec(1), 2) }) }) describe("createElse", () => { before(() => { this.newCase = CaseFactory.createElse<number,number>({ then: v => v + 1, }) }) it("should be return new instance", () => { assert.ok(this.newCase instanceof Case) }) it("should be test correctly", () => { assert.equal(this.newCase.when.test(1), true) assert.equal(this.newCase.when.test(undefined), true) assert.equal(this.newCase.when.test(null), true) assert.equal(this.newCase.when.test(0), true) }) it("should be exec correctly", () => { assert.equal(this.newCase.then.exec(1), 2) }) }) })<|fim▁end|>
}) describe("createNone", () => {
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate littletest; use littletest::{Runnable, TestResult, TestRunner}; struct TestCase { result: TestResult, } impl TestCase { fn new(result: TestResult) -> TestCase { TestCase { result: result } } } impl Runnable for TestCase { fn run(&self) -> TestResult { self.result.clone()<|fim▁hole|>} #[test] fn it_works() { use std::iter::repeat; let runnables = repeat(TestResult::Pass) .take(10) .map(|result| Box::new(TestCase::new(result)) as Box<Runnable + Sync>) .collect::<Vec<_>>(); let runner = TestRunner::new(true); runner.run(&runnables); }<|fim▁end|>
}
<|file_name|>bitwiseNotOperatorWithEnumType.ts<|end_file_name|><|fim▁begin|>// ~ operator on enum type enum ENUM1 { 1, 2, "" }; // enum type var var ResultIsNumber1 = ~ENUM1; // enum type expressions<|fim▁hole|> // multiple ~ operators var ResultIsNumber4 = ~~~(ENUM1[1] + ENUM1[2]); // miss assignment operators ~ENUM1; ~ENUM1[1]; ~ENUM1[1], ~ENUM1[2];<|fim▁end|>
var ResultIsNumber2 = ~ENUM1[1]; var ResultIsNumber3 = ~(ENUM1[1] + ENUM1[2]);
<|file_name|>recorder.py<|end_file_name|><|fim▁begin|>import re import datetime from pymongo import MongoClient from bson import ObjectId from .exception import RecorderException, StructureException __all__ = ['get_database', 'Recorder', 'Structure'] def get_database(db_name, host, port=27017): return MongoClient(host, port)[db_name] class Structure(dict): __store = {} def __init__(self, *args, **kwargs): super(Structure, self).__init__(*args, **kwargs) self.__dict__ = self self._validate() def _validate(self): pass def to_dict(self): return self.__dict__ class Recorder: struct = None __store = None<|fim▁hole|> database = None class DataStore: def get(self, key): return self.__dict__.get(key) def set(self, key, value): self.__dict__[key] = value def to_dict(self): return self.__dict__ def __init__(self, key, data=None): self._key = key self.__store = self.DataStore() self._init_from_dict(data) def _init_from_dict(self, data): if not isinstance(self.struct, Structure): raise RecorderException("{0} struct is not a defined".format(self.__class__.__name__)) if not isinstance(data, dict): data = dict() # initialize store data for k, v in self.struct.to_dict().items(): result = data.get(k) if not result: result = v self.__store.set(k, result) def key(self): return self._key def pk(self): return ObjectId(self.key()) def __str__(self): return self.__name__ def __getattr__(self, key): if key in list(self.struct.keys()): return self.__store.get(key) else: return super(Recorder, self).__getattr__(key) def __setattr__(self, key, value): if key in list(self.struct.keys()): self.__store.set(key, value) else: super(Recorder, self).__setattr__(key, value) @classmethod def colname(cls): return re.sub('(?!^)([A-Z]+)', r'_\1', cls.__name__).lower().__str__() @classmethod def collection(cls): return cls.Meta.database[cls.colname()] @classmethod def new(cls, data=None): return cls(None, data) @classmethod def create(cls, data): key = None if '_id' in data.keys(): key = data['_id'] if isinstance(data['_id'], ObjectId): key = data['_id'].__str__() return cls(key, data) @classmethod def get(cls, key, *args, **kwargs): data = cls.collection().find_one({'_id': ObjectId(key)}, *args, **kwargs) if not data: return None return cls(key, data) @classmethod def get_by(cls, key, value, *args, **kwargs): data = cls.collection().find_one({key: value}, *args, **kwargs) if not data: return None return cls.create(data) @classmethod def find(cls, *args, **kwargs): return [cls.create(x) for x in cls.collection().find(*args, **kwargs)] def save(self): if not self.key(): return self.insert() return self.update() def insert(self): result = self.collection().insert_one(self.to_mongo()) self._key = result.inserted_id.__str__() self.__store.set('_id', self.key()) return True def update(self, upsert=False): if not self.key(): return self.insert() self.collection().update_one({'_id': self.pk()}, {'$set': self.to_mongo()}, upsert=upsert) return True def delete(self): if not self.key(): return False self.collection().delete_one({'_id': self.pk()}) return True @classmethod def exists(cls, key, value): return cls.find(filter={key: value}, limit=1).__len__() > 0 def to_dict(self): return self.__store.to_dict() def to_mongo(self): store = self.to_dict() now = datetime.datetime.now() if not 'created_at' in store.keys(): store['created_at'] = now store['modified_at'] = now if '_id' in store.keys(): del store['_id'] return store<|fim▁end|>
class Meta:
<|file_name|>server.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010-2011 OpenStack, LLC. # Copyright (c) 2008-2011 Gluster, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Object Server for Swift """ from __future__ import with_statement import cPickle as pickle import errno import os import time import traceback from datetime import datetime from hashlib import md5 from tempfile import mkstemp from urllib import unquote from contextlib import contextmanager from ConfigParser import ConfigParser from webob import Request, Response, UTC from webob.exc import HTTPAccepted, HTTPBadRequest, HTTPCreated, \ HTTPInternalServerError, HTTPNoContent, HTTPNotFound, \ HTTPNotModified, HTTPPreconditionFailed, \ HTTPRequestTimeout, HTTPUnprocessableEntity, HTTPMethodNotAllowed from xattr import getxattr, setxattr from eventlet import sleep, Timeout, tpool from swift.common.utils import mkdirs, normalize_timestamp, \ storage_directory, hash_path, renamer, fallocate, \ split_path, drop_buffer_cache, get_logger, write_pickle, \ plugin_enabled from swift.common.bufferedhttp import http_connect if plugin_enabled(): from swift.plugins.constraints import check_object_creation from swift.plugins.utils import X_TYPE, X_OBJECT_TYPE, FILE, DIR, MARKER_DIR, \ OBJECT, DIR_TYPE, FILE_TYPE else: from swift.common.constraints import check_object_creation from swift.common.constraints import check_mount, check_float, check_utf8 from swift.common.exceptions import ConnectionTimeout, DiskFileError, \ DiskFileNotExist from swift.obj.replicator import tpooled_get_hashes, invalidate_hash, \ quarantine_renamer DATADIR = 'objects' ASYNCDIR = 'async_pending' PICKLE_PROTOCOL = 2 METADATA_KEY = 'user.swift.metadata' MAX_OBJECT_NAME_LENGTH = 1024 KEEP_CACHE_SIZE = (5 * 1024 * 1024) # keep these lower-case DISALLOWED_HEADERS = set('content-length content-type deleted etag'.split()) def read_metadata(fd): """ Helper function to read the pickled metadata from an object file. :param fd: file descriptor to load the metadata from :returns: dictionary of metadata """ metadata = '' key = 0 try: while True: metadata += getxattr(fd, '%s%s' % (METADATA_KEY, (key or ''))) key += 1 except IOError: pass return pickle.loads(metadata) def write_metadata(fd, metadata): """ Helper function to write pickled metadata for an object file. :param fd: file descriptor to write the metadata :param metadata: metadata to write """ metastr = pickle.dumps(metadata, PICKLE_PROTOCOL) key = 0 while metastr: setxattr(fd, '%s%s' % (METADATA_KEY, key or ''), metastr[:254]) metastr = metastr[254:] key += 1 class DiskFile(object): """ Manage object files on disk. :param path: path to devices on the node :param device: device name :param partition: partition on the device the object lives in :param account: account name for the object :param container: container name for the object :param obj: object name for the object :param keep_data_fp: if True, don't close the fp, otherwise close it :param disk_chunk_Size: size of chunks on file reads """ def __init__(self, path, device, partition, account, container, obj, logger, keep_data_fp=False, disk_chunk_size=65536): self.disk_chunk_size = disk_chunk_size self.name = '/' + '/'.join((account, container, obj)) name_hash = hash_path(account, container, obj) self.datadir = os.path.join(path, device, storage_directory(DATADIR, partition, name_hash)) self.device_path = os.path.join(path, device) self.tmpdir = os.path.join(path, device, 'tmp') self.logger = logger self.metadata = {} self.meta_file = None self.data_file = None self.fp = None self.iter_etag = None self.started_at_0 = False self.read_to_eof = False self.quarantined_dir = None self.keep_cache = False if not os.path.exists(self.datadir): return files = sorted(os.listdir(self.datadir), reverse=True) for file in files: if file.endswith('.ts'): self.data_file = self.meta_file = None self.metadata = {'deleted': True} return if file.endswith('.meta') and not self.meta_file: self.meta_file = os.path.join(self.datadir, file) if file.endswith('.data') and not self.data_file: self.data_file = os.path.join(self.datadir, file) break if not self.data_file: return self.fp = open(self.data_file, 'rb') self.metadata = read_metadata(self.fp) if not keep_data_fp: self.close(verify_file=False) if self.meta_file: with open(self.meta_file) as mfp: for key in self.metadata.keys(): if key.lower() not in DISALLOWED_HEADERS: del self.metadata[key] self.metadata.update(read_metadata(mfp)) def __iter__(self): """Returns an iterator over the data file.""" try: dropped_cache = 0 read = 0 self.started_at_0 = False self.read_to_eof = False if self.fp.tell() == 0: self.started_at_0 = True self.iter_etag = md5() while True: chunk = self.fp.read(self.disk_chunk_size) if chunk: if self.iter_etag: self.iter_etag.update(chunk) read += len(chunk) if read - dropped_cache > (1024 * 1024): self.drop_cache(self.fp.fileno(), dropped_cache, read - dropped_cache) dropped_cache = read yield chunk else: self.read_to_eof = True self.drop_cache(self.fp.fileno(), dropped_cache, read - dropped_cache) break finally: self.close() def app_iter_range(self, start, stop): """Returns an iterator over the data file for range (start, stop)""" if start: self.fp.seek(start) if stop is not None: length = stop - start else: length = None for chunk in self: if length is not None: length -= len(chunk) if length < 0: # Chop off the extra: yield chunk[:length] break yield chunk def _handle_close_quarantine(self): """Check if file needs to be quarantined""" try: obj_size = self.get_data_file_size() except DiskFileError, e: self.quarantine() return except DiskFileNotExist: return if (self.iter_etag and self.started_at_0 and self.read_to_eof and 'ETag' in self.metadata and self.iter_etag.hexdigest() != self.metadata.get('ETag')): self.quarantine() def close(self, verify_file=True): """ Close the file. Will handle quarantining file if necessary. :param verify_file: Defaults to True. If false, will not check file to see if it needs quarantining. """ if self.fp: try: if verify_file: self._handle_close_quarantine() except (Exception, Timeout), e: import traceback self.logger.error(_('ERROR DiskFile %(data_file)s in ' '%(data_dir)s close failure: %(exc)s : %(stack)'), {'exc': e, 'stack': ''.join(traceback.format_stack()), 'data_file': self.data_file, 'data_dir': self.datadir}) finally: self.fp.close() self.fp = None def is_deleted(self): """ Check if the file is deleted. :returns: True if the file doesn't exist or has been flagged as deleted. """ return not self.data_file or 'deleted' in self.metadata @contextmanager def mkstemp(self): """Contextmanager to make a temporary file.""" if not os.path.exists(self.tmpdir): mkdirs(self.tmpdir) fd, tmppath = mkstemp(dir=self.tmpdir) try: yield fd, tmppath finally: try: os.close(fd) except OSError: pass try: os.unlink(tmppath) except OSError: pass def put(self, fd, tmppath, metadata, extension='.data'): """ Finalize writing the file on disk, and renames it from the temp file to the real location. This should be called after the data has been written to the temp file. :params fd: file descriptor of the temp file :param tmppath: path to the temporary file being used :param metadata: dictionary of metadata to be written :param extention: extension to be used when making the file<|fim▁hole|> """ metadata['name'] = self.name timestamp = normalize_timestamp(metadata['X-Timestamp']) write_metadata(fd, metadata) if 'Content-Length' in metadata: self.drop_cache(fd, 0, int(metadata['Content-Length'])) tpool.execute(os.fsync, fd) invalidate_hash(os.path.dirname(self.datadir)) renamer(tmppath, os.path.join(self.datadir, timestamp + extension)) self.metadata = metadata def unlinkold(self, timestamp): """ Remove any older versions of the object file. Any file that has an older timestamp than timestamp will be deleted. :param timestamp: timestamp to compare with each file """ timestamp = normalize_timestamp(timestamp) for fname in os.listdir(self.datadir): if fname < timestamp: try: os.unlink(os.path.join(self.datadir, fname)) except OSError, err: # pragma: no cover if err.errno != errno.ENOENT: raise def drop_cache(self, fd, offset, length): """Method for no-oping buffer cache drop method.""" if not self.keep_cache: drop_buffer_cache(fd, offset, length) def quarantine(self): """ In the case that a file is corrupted, move it to a quarantined area to allow replication to fix it. :returns: if quarantine is successful, path to quarantined directory otherwise None """ if not (self.is_deleted() or self.quarantined_dir): self.quarantined_dir = quarantine_renamer(self.device_path, self.data_file) return self.quarantined_dir def get_data_file_size(self): """ Returns the os.path.getsize for the file. Raises an exception if this file does not match the Content-Length stored in the metadata. Or if self.data_file does not exist. :returns: file size as an int :raises DiskFileError: on file size mismatch. :raises DiskFileNotExist: on file not existing (including deleted) """ try: file_size = 0 if self.data_file: file_size = os.path.getsize(self.data_file) if 'Content-Length' in self.metadata: metadata_size = int(self.metadata['Content-Length']) if file_size != metadata_size: raise DiskFileError('Content-Length of %s does not ' 'match file size of %s' % (metadata_size, file_size)) return file_size except OSError, err: if err.errno != errno.ENOENT: raise raise DiskFileNotExist('Data File does not exist.') if plugin_enabled(): from swift.plugins.DiskFile import Gluster_DiskFile class ObjectController(object): """Implements the WSGI application for the Swift Object Server.""" def __init__(self, conf): """ Creates a new WSGI application for the Swift Object Server. An example configuration is given at <source-dir>/etc/object-server.conf-sample or /etc/swift/object-server.conf-sample. """ self.logger = get_logger(conf, log_route='object-server') self.devices = conf.get('devices', '/srv/node/') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') self.node_timeout = int(conf.get('node_timeout', 3)) self.conn_timeout = float(conf.get('conn_timeout', 0.5)) self.disk_chunk_size = int(conf.get('disk_chunk_size', 65536)) self.network_chunk_size = int(conf.get('network_chunk_size', 65536)) self.log_requests = conf.get('log_requests', 't')[:1].lower() == 't' self.max_upload_time = int(conf.get('max_upload_time', 86400)) self.slow = int(conf.get('slow', 0)) self.bytes_per_sync = int(conf.get('mb_per_sync', 512)) * 1024 * 1024 default_allowed_headers = ''' content-disposition, content-encoding, x-delete-at, x-object-manifest, ''' self.allowed_headers = set(i.strip().lower() for i in \ conf.get('allowed_headers', \ default_allowed_headers).split(',') if i.strip() and \ i.strip().lower() not in DISALLOWED_HEADERS) self.expiring_objects_account = \ (conf.get('auto_create_account_prefix') or '.') + \ 'expiring_objects' self.expiring_objects_container_divisor = \ int(conf.get('expiring_objects_container_divisor') or 86400) self.fs_object = None def get_DiskFile_obj(self, path, device, partition, account, container, obj, logger, keep_data_fp=False, disk_chunk_size=65536): if self.fs_object: return Gluster_DiskFile(path, device, partition, account, container, obj, logger, keep_data_fp, disk_chunk_size, fs_object = self.fs_object); else: return DiskFile(path, device, partition, account, container, obj, logger, keep_data_fp, disk_chunk_size) def async_update(self, op, account, container, obj, host, partition, contdevice, headers_out, objdevice): """ Sends or saves an async update. :param op: operation performed (ex: 'PUT', or 'DELETE') :param account: account name for the object :param container: container name for the object :param obj: object name :param host: host that the container is on :param partition: partition that the container is on :param contdevice: device name that the container is on :param headers_out: dictionary of headers to send in the container request :param objdevice: device name that the object is in """ full_path = '/%s/%s/%s' % (account, container, obj) if all([host, partition, contdevice]): try: with ConnectionTimeout(self.conn_timeout): ip, port = host.rsplit(':', 1) conn = http_connect(ip, port, contdevice, partition, op, full_path, headers_out) with Timeout(self.node_timeout): response = conn.getresponse() response.read() if 200 <= response.status < 300: return else: self.logger.error(_('ERROR Container update failed ' '(saving for async update later): %(status)d ' 'response from %(ip)s:%(port)s/%(dev)s'), {'status': response.status, 'ip': ip, 'port': port, 'dev': contdevice}) except (Exception, Timeout): self.logger.exception(_('ERROR container update failed with ' '%(ip)s:%(port)s/%(dev)s (saving for async update later)'), {'ip': ip, 'port': port, 'dev': contdevice}) async_dir = os.path.join(self.devices, objdevice, ASYNCDIR) ohash = hash_path(account, container, obj) write_pickle( {'op': op, 'account': account, 'container': container, 'obj': obj, 'headers': headers_out}, os.path.join(async_dir, ohash[-3:], ohash + '-' + normalize_timestamp(headers_out['x-timestamp'])), os.path.join(self.devices, objdevice, 'tmp')) def container_update(self, op, account, container, obj, headers_in, headers_out, objdevice): """ Update the container when objects are updated. :param op: operation performed (ex: 'PUT', or 'DELETE') :param account: account name for the object :param container: container name for the object :param obj: object name :param headers_in: dictionary of headers from the original request :param headers_out: dictionary of headers to send in the container request :param objdevice: device name that the object is in """ host = headers_in.get('X-Container-Host', None) partition = headers_in.get('X-Container-Partition', None) contdevice = headers_in.get('X-Container-Device', None) if not all([host, partition, contdevice]): return self.async_update(op, account, container, obj, host, partition, contdevice, headers_out, objdevice) def delete_at_update(self, op, delete_at, account, container, obj, headers_in, objdevice): """ Update the expiring objects container when objects are updated. :param op: operation performed (ex: 'PUT', or 'DELETE') :param account: account name for the object :param container: container name for the object :param obj: object name :param headers_in: dictionary of headers from the original request :param objdevice: device name that the object is in """ host = partition = contdevice = None headers_out = {'x-timestamp': headers_in['x-timestamp'], 'x-trans-id': headers_in.get('x-trans-id', '-')} if op != 'DELETE': host = headers_in.get('X-Delete-At-Host', None) partition = headers_in.get('X-Delete-At-Partition', None) contdevice = headers_in.get('X-Delete-At-Device', None) headers_out['x-size'] = '0' headers_out['x-content-type'] = 'text/plain' headers_out['x-etag'] = 'd41d8cd98f00b204e9800998ecf8427e' self.async_update(op, self.expiring_objects_account, str(delete_at / self.expiring_objects_container_divisor * self.expiring_objects_container_divisor), '%s-%s/%s/%s' % (delete_at, account, container, obj), host, partition, contdevice, headers_out, objdevice) def POST(self, request): """Handle HTTP POST requests for the Swift Object Server.""" try: device, partition, account, container, obj = \ split_path(unquote(request.path), 5, 5, True) except ValueError, err: return HTTPBadRequest(body=str(err), request=request, content_type='text/plain') if 'x-timestamp' not in request.headers or \ not check_float(request.headers['x-timestamp']): return HTTPBadRequest(body='Missing timestamp', request=request, content_type='text/plain') new_delete_at = int(request.headers.get('X-Delete-At') or 0) if new_delete_at and new_delete_at < time.time(): return HTTPBadRequest(body='X-Delete-At in past', request=request, content_type='text/plain') if self.mount_check and not check_mount(self.devices, device): return Response(status='507 %s is not mounted' % device) file = self.get_DiskFile_obj(self.devices, device, partition, account, container, obj, self.logger, disk_chunk_size=self.disk_chunk_size) if 'X-Delete-At' in file.metadata and \ int(file.metadata['X-Delete-At']) <= time.time(): return HTTPNotFound(request=request) if file.is_deleted(): response_class = HTTPNotFound else: response_class = HTTPAccepted try: file_size = file.get_data_file_size() except (DiskFileError, DiskFileNotExist): file.quarantine() return HTTPNotFound(request=request) metadata = {'X-Timestamp': request.headers['x-timestamp']} metadata.update(val for val in request.headers.iteritems() if val[0].lower().startswith('x-object-meta-')) for header_key in self.allowed_headers: if header_key in request.headers: header_caps = header_key.title() metadata[header_caps] = request.headers[header_key] old_delete_at = int(file.metadata.get('X-Delete-At') or 0) if old_delete_at != new_delete_at: if new_delete_at: self.delete_at_update('PUT', new_delete_at, account, container, obj, request.headers, device) if old_delete_at: self.delete_at_update('DELETE', old_delete_at, account, container, obj, request.headers, device) with file.mkstemp() as (fd, tmppath): file.put(fd, tmppath, metadata, extension='.meta') return response_class(request=request) def PUT(self, request): """Handle HTTP PUT requests for the Swift Object Server.""" try: device, partition, account, container, obj = \ split_path(unquote(request.path), 5, 5, True) except ValueError, err: return HTTPBadRequest(body=str(err), request=request, content_type='text/plain') if self.mount_check and not check_mount(self.devices, device): return Response(status='507 %s is not mounted' % device) if 'x-timestamp' not in request.headers or \ not check_float(request.headers['x-timestamp']): return HTTPBadRequest(body='Missing timestamp', request=request, content_type='text/plain') error_response = check_object_creation(request, obj) if error_response: return error_response new_delete_at = int(request.headers.get('X-Delete-At') or 0) if new_delete_at and new_delete_at < time.time(): return HTTPBadRequest(body='X-Delete-At in past', request=request, content_type='text/plain') file = self.get_DiskFile_obj(self.devices, device, partition, account, container, obj, self.logger, disk_chunk_size=self.disk_chunk_size) orig_timestamp = file.metadata.get('X-Timestamp') upload_expiration = time.time() + self.max_upload_time etag = md5() upload_size = 0 last_sync = 0 with file.mkstemp() as (fd, tmppath): if 'content-length' in request.headers: fallocate(fd, int(request.headers['content-length'])) reader = request.environ['wsgi.input'].read for chunk in iter(lambda: reader(self.network_chunk_size), ''): upload_size += len(chunk) if time.time() > upload_expiration: return HTTPRequestTimeout(request=request) etag.update(chunk) while chunk: written = os.write(fd, chunk) chunk = chunk[written:] # For large files sync every 512MB (by default) written if upload_size - last_sync >= self.bytes_per_sync: tpool.execute(os.fdatasync, fd) drop_buffer_cache(fd, last_sync, upload_size - last_sync) last_sync = upload_size if 'content-length' in request.headers and \ int(request.headers['content-length']) != upload_size: return Response(status='499 Client Disconnect') etag = etag.hexdigest() if 'etag' in request.headers and \ request.headers['etag'].lower() != etag: return HTTPUnprocessableEntity(request=request) content_type = request.headers['content-type'] if self.fs_object and not content_type: content_type = FILE_TYPE if not self.fs_object: metadata = { 'X-Timestamp': request.headers['x-timestamp'], 'Content-Type': request.headers['content-type'], 'ETag': etag, 'Content-Length': str(os.fstat(fd).st_size), } else: metadata = { 'X-Timestamp': request.headers['x-timestamp'], 'Content-Type': request.headers['content-type'], 'ETag': etag, 'Content-Length': str(os.fstat(fd).st_size), X_TYPE: OBJECT, X_OBJECT_TYPE: FILE, } if self.fs_object and \ request.headers['content-type'].lower() == DIR_TYPE: metadata.update({X_OBJECT_TYPE: MARKER_DIR}) metadata.update(val for val in request.headers.iteritems() if val[0].lower().startswith('x-object-meta-') and len(val[0]) > 14) for header_key in self.allowed_headers: if header_key in request.headers: header_caps = header_key.title() metadata[header_caps] = request.headers[header_key] old_delete_at = int(file.metadata.get('X-Delete-At') or 0) if old_delete_at != new_delete_at: if new_delete_at: self.delete_at_update('PUT', new_delete_at, account, container, obj, request.headers, device) if old_delete_at: self.delete_at_update('DELETE', old_delete_at, account, container, obj, request.headers, device) file.put(fd, tmppath, metadata) file.unlinkold(metadata['X-Timestamp']) if not orig_timestamp or \ orig_timestamp < request.headers['x-timestamp']: self.container_update('PUT', account, container, obj, request.headers, {'x-size': file.metadata['Content-Length'], 'x-content-type': file.metadata['Content-Type'], 'x-timestamp': file.metadata['X-Timestamp'], 'x-etag': file.metadata['ETag'], 'x-trans-id': request.headers.get('x-trans-id', '-')}, device) resp = HTTPCreated(request=request, etag=etag) return resp def GET(self, request): """Handle HTTP GET requests for the Swift Object Server.""" try: device, partition, account, container, obj = \ split_path(unquote(request.path), 5, 5, True) except ValueError, err: return HTTPBadRequest(body=str(err), request=request, content_type='text/plain') if self.mount_check and not check_mount(self.devices, device): return Response(status='507 %s is not mounted' % device) file = self.get_DiskFile_obj(self.devices, device, partition, account, container, obj, self.logger, keep_data_fp=True, disk_chunk_size=self.disk_chunk_size) if file.is_deleted() or ('X-Delete-At' in file.metadata and int(file.metadata['X-Delete-At']) <= time.time()): if request.headers.get('if-match') == '*': return HTTPPreconditionFailed(request=request) else: return HTTPNotFound(request=request) try: file_size = file.get_data_file_size() except (DiskFileError, DiskFileNotExist): file.quarantine() return HTTPNotFound(request=request) if request.headers.get('if-match') not in (None, '*') and \ file.metadata['ETag'] not in request.if_match: file.close() return HTTPPreconditionFailed(request=request) if request.headers.get('if-none-match') != None: if file.metadata['ETag'] in request.if_none_match: resp = HTTPNotModified(request=request) resp.etag = file.metadata['ETag'] file.close() return resp try: if_unmodified_since = request.if_unmodified_since except (OverflowError, ValueError): # catches timestamps before the epoch return HTTPPreconditionFailed(request=request) if if_unmodified_since and \ datetime.fromtimestamp(float(file.metadata['X-Timestamp']), UTC) > \ if_unmodified_since: file.close() return HTTPPreconditionFailed(request=request) try: if_modified_since = request.if_modified_since except (OverflowError, ValueError): # catches timestamps before the epoch return HTTPPreconditionFailed(request=request) if if_modified_since and \ datetime.fromtimestamp(float(file.metadata['X-Timestamp']), UTC) < \ if_modified_since: file.close() return HTTPNotModified(request=request) response = Response(app_iter=file, request=request, conditional_response=True) response.headers['Content-Type'] = file.metadata.get('Content-Type', 'application/octet-stream') for key, value in file.metadata.iteritems(): if key.lower().startswith('x-object-meta-') or \ key.lower() in self.allowed_headers: response.headers[key] = value response.etag = file.metadata['ETag'] response.last_modified = float(file.metadata['X-Timestamp']) response.content_length = file_size if response.content_length < KEEP_CACHE_SIZE and \ 'X-Auth-Token' not in request.headers and \ 'X-Storage-Token' not in request.headers: file.keep_cache = True if 'Content-Encoding' in file.metadata: response.content_encoding = file.metadata['Content-Encoding'] response.headers['X-Timestamp'] = file.metadata['X-Timestamp'] return request.get_response(response) def HEAD(self, request): """Handle HTTP HEAD requests for the Swift Object Server.""" try: device, partition, account, container, obj = \ split_path(unquote(request.path), 5, 5, True) except ValueError, err: resp = HTTPBadRequest(request=request) resp.content_type = 'text/plain' resp.body = str(err) return resp if self.mount_check and not check_mount(self.devices, device): return Response(status='507 %s is not mounted' % device) file = self.get_DiskFile_obj(self.devices, device, partition, account, container, obj, self.logger, disk_chunk_size=self.disk_chunk_size) if file.is_deleted() or ('X-Delete-At' in file.metadata and int(file.metadata['X-Delete-At']) <= time.time()): return HTTPNotFound(request=request) try: file_size = file.get_data_file_size() except (DiskFileError, DiskFileNotExist): file.quarantine() return HTTPNotFound(request=request) response = Response(request=request, conditional_response=True) response.headers['Content-Type'] = file.metadata.get('Content-Type', 'application/octet-stream') for key, value in file.metadata.iteritems(): if key.lower().startswith('x-object-meta-') or \ key.lower() in self.allowed_headers: response.headers[key] = value response.etag = file.metadata['ETag'] response.last_modified = float(file.metadata['X-Timestamp']) # Needed for container sync feature response.headers['X-Timestamp'] = file.metadata['X-Timestamp'] response.content_length = file_size if 'Content-Encoding' in file.metadata: response.content_encoding = file.metadata['Content-Encoding'] response.headers['X-Timestamp'] = file.metadata['X-Timestamp'] return response def DELETE(self, request): """Handle HTTP DELETE requests for the Swift Object Server.""" try: device, partition, account, container, obj = \ split_path(unquote(request.path), 5, 5, True) except ValueError, e: return HTTPBadRequest(body=str(e), request=request, content_type='text/plain') if 'x-timestamp' not in request.headers or \ not check_float(request.headers['x-timestamp']): return HTTPBadRequest(body='Missing timestamp', request=request, content_type='text/plain') if self.mount_check and not check_mount(self.devices, device): return Response(status='507 %s is not mounted' % device) response_class = HTTPNoContent file = self.get_DiskFile_obj(self.devices, device, partition, account, container, obj, self.logger, disk_chunk_size=self.disk_chunk_size) if 'x-if-delete-at' in request.headers and \ int(request.headers['x-if-delete-at']) != \ int(file.metadata.get('X-Delete-At') or 0): return HTTPPreconditionFailed(request=request, body='X-If-Delete-At and X-Delete-At do not match') orig_timestamp = file.metadata.get('X-Timestamp') if file.is_deleted(): response_class = HTTPNotFound metadata = { 'X-Timestamp': request.headers['X-Timestamp'], 'deleted': True, } with file.mkstemp() as (fd, tmppath): old_delete_at = int(file.metadata.get('X-Delete-At') or 0) if old_delete_at: self.delete_at_update('DELETE', old_delete_at, account, container, obj, request.headers, device) file.put(fd, tmppath, metadata, extension='.ts') file.unlinkold(metadata['X-Timestamp']) if not orig_timestamp or \ orig_timestamp < request.headers['x-timestamp']: self.container_update('DELETE', account, container, obj, request.headers, {'x-timestamp': metadata['X-Timestamp'], 'x-trans-id': request.headers.get('x-trans-id', '-')}, device) resp = response_class(request=request) return resp def REPLICATE(self, request): """ Handle REPLICATE requests for the Swift Object Server. This is used by the object replicator to get hashes for directories. """ try: device, partition, suffix = split_path( unquote(request.path), 2, 3, True) except ValueError, e: return HTTPBadRequest(body=str(e), request=request, content_type='text/plain') if self.mount_check and not check_mount(self.devices, device): return Response(status='507 %s is not mounted' % device) path = os.path.join(self.devices, device, DATADIR, partition) if not os.path.exists(path): mkdirs(path) suffixes = suffix.split('-') if suffix else [] _junk, hashes = tpool.execute(tpooled_get_hashes, path, recalculate=suffixes) # See tpooled_get_hashes "Hack". if isinstance(hashes, BaseException): raise hashes return Response(body=pickle.dumps(hashes)) def plugin(self, env): if env.get('Gluster_enabled', False): self.fs_object = env.get('fs_object') self.devices = env.get('root') self.mount_check = False else: self.fs_object = None def __call__(self, env, start_response): """WSGI Application entry point for the Swift Object Server.""" start_time = time.time() self.plugin(env) req = Request(env) self.logger.txn_id = req.headers.get('x-trans-id', None) if not check_utf8(req.path_info): res = HTTPPreconditionFailed(body='Invalid UTF8') else: try: if hasattr(self, req.method): res = getattr(self, req.method)(req) else: res = HTTPMethodNotAllowed() except (Exception, Timeout): self.logger.exception(_('ERROR __call__ error with %(method)s' ' %(path)s '), {'method': req.method, 'path': req.path}) res = HTTPInternalServerError(body=traceback.format_exc()) trans_time = time.time() - start_time if self.log_requests: log_line = '%s - - [%s] "%s %s" %s %s "%s" "%s" "%s" %.4f' % ( req.remote_addr, time.strftime('%d/%b/%Y:%H:%M:%S +0000', time.gmtime()), req.method, req.path, res.status.split()[0], res.content_length or '-', req.referer or '-', req.headers.get('x-trans-id', '-'), req.user_agent or '-', trans_time) if req.method == 'REPLICATE': self.logger.debug(log_line) else: self.logger.info(log_line) if req.method in ('PUT', 'DELETE'): slow = self.slow - trans_time if slow > 0: sleep(slow) return res(env, start_response) def app_factory(global_conf, **local_conf): """paste.deploy app factory for creating WSGI object server apps""" conf = global_conf.copy() conf.update(local_conf) return ObjectController(conf)<|fim▁end|>
<|file_name|>CalenderUtils.py<|end_file_name|><|fim▁begin|>''' Created on 2016年9月16日 @author: Administrator ''' import calendar #返回year的日历 def getYear(year): return calendar.calendar(year) #返回year-month的日历 def getMonth(year, month): return calendar.month(year, month) #返回某年某月的第一天是星期几(从0开始, 0是星期一,6是星期日)和该月天数 def getMonthRange(year, month): return calendar.monthrange(year, month) #返回某个月以每一周为元素的序列 def getMonthYear(year, month): return calendar.monthcalendar(year, month) #判断year是是否闰年 def isLeap(year): return calendar.isleap(year) print(getYear(2016))<|fim▁hole|>print(getMonthYear(2016, 10)) print(getMonthRange(2016, 5)) print(isLeap(2016))<|fim▁end|>
print(getMonth(2016, 10))
<|file_name|>test_region.py<|end_file_name|><|fim▁begin|>import numpy as np import pytest import xarray as xr from aospy import Region from aospy.region import ( _get_land_mask, BoundsRect, ) from aospy.internal_names import ( LAT_STR, LON_STR, SFC_AREA_STR, LAND_MASK_STR ) from aospy.utils import Longitude @pytest.fixture() def values_for_reg_arr(): return np.array([[-2., 1.], [np.nan, 5.], [3., 3.], [4., 4.2]]) @pytest.fixture() def data_for_reg_calcs(values_for_reg_arr): lat = [-10., 1., 10., 20.] lon = [1., 10.] sfc_area = [0.5, 1., 0.5, 0.25] land_mask = [1., 1., 0., 1.] lat = xr.DataArray(lat, dims=[LAT_STR], coords=[lat]) lon = xr.DataArray(lon, dims=[LON_STR], coords=[lon]) sfc_area = xr.DataArray(sfc_area, dims=[LAT_STR], coords=[lat]) land_mask = xr.DataArray(land_mask, dims=[LAT_STR], coords=[lat]) sfc_area, _ = xr.broadcast(sfc_area, lon) land_mask, _ = xr.broadcast(land_mask, lon) da = xr.DataArray(values_for_reg_arr, coords=[lat, lon]) da.coords[SFC_AREA_STR] = sfc_area da.coords[LAND_MASK_STR] = land_mask return da _alt_names = {LON_STR: 'LONS', LAT_STR: 'LATS', LAND_MASK_STR: 'lm', SFC_AREA_STR: 'AREA'} @pytest.fixture() def data_reg_alt_names(data_for_reg_calcs): return data_for_reg_calcs.rename(_alt_names) region_no_land_mask = Region( name='test', description='Test region with no land mask', west_bound=0., east_bound=5, south_bound=0, north_bound=90., do_land_mask=False ) region_land_mask = Region( name='test', description='Test region with land mask', west_bound=0., east_bound=5, south_bound=0, north_bound=90., do_land_mask=True ) _expected_mask = [[False, False], [True, False], [True, False], [True, False]] def test_get_land_mask_without_land_mask(data_for_reg_calcs): result = _get_land_mask(data_for_reg_calcs, region_no_land_mask.do_land_mask) expected = 1 assert result == expected def test_get_land_mask_with_land_mask(data_for_reg_calcs): result = _get_land_mask(data_for_reg_calcs, region_land_mask.do_land_mask) expected = data_for_reg_calcs[LAND_MASK_STR] xr.testing.assert_identical(result, expected) def test_get_land_mask_non_aospy_name(data_reg_alt_names): result = _get_land_mask(data_reg_alt_names, region_land_mask.do_land_mask, land_mask_str=_alt_names[LAND_MASK_STR]) expected = data_reg_alt_names[_alt_names[LAND_MASK_STR]] xr.testing.assert_identical(result, expected) def test_region_init():<|fim▁hole|> description='region description', west_bound=0., east_bound=5, south_bound=0, north_bound=90., do_land_mask=True ) assert region.name == 'test' assert region.description == 'region description' assert isinstance(region.mask_bounds, tuple) assert len(region.mask_bounds) == 1 assert isinstance(region.mask_bounds[0], BoundsRect) assert np.all(region.mask_bounds[0] == (Longitude(0.), Longitude(5), 0, 90.)) assert region.do_land_mask is True def test_region_init_mult_rect(): bounds_in = [[1, 2, 3, 4], (-12, -30, 2.3, 9)] region = Region(name='test', mask_bounds=bounds_in) assert isinstance(region.mask_bounds, tuple) assert len(region.mask_bounds) == 2 for (w, e, s, n), bounds in zip(bounds_in, region.mask_bounds): assert isinstance(bounds, tuple) assert np.all(bounds == (Longitude(w), Longitude(e), s, n)) def test_region_init_bad_bounds(): with pytest.raises(ValueError): Region(mask_bounds=[(1, 2, 3)]) Region(mask_bounds=[(1, 2, 3, 4), (1, 2, 3)]) def test_make_mask_single_rect(data_for_reg_calcs): result = region_land_mask._make_mask(data_for_reg_calcs) expected = xr.DataArray(_expected_mask, dims=[LAT_STR, LON_STR], coords={LAT_STR: data_for_reg_calcs[LAT_STR], LON_STR: data_for_reg_calcs[LON_STR]}) xr.testing.assert_equal(result.transpose(), expected) def test_make_mask_mult_rect(data_for_reg_calcs): mask_bounds = (region_land_mask.mask_bounds[0], [0, 360, -20, -5]) region = Region(name='mult_rect', mask_bounds=mask_bounds) result = region._make_mask(data_for_reg_calcs) expected_values = [[True, True], [True, False], [True, False], [True, False]] expected = xr.DataArray(expected_values, dims=[LAT_STR, LON_STR], coords={LAT_STR: data_for_reg_calcs[LAT_STR], LON_STR: data_for_reg_calcs[LON_STR]}) xr.testing.assert_equal(result.transpose(), expected) @pytest.mark.parametrize( 'region', [region_no_land_mask, region_land_mask], ids=['no-land-mask', 'land-mask']) def test_mask_var(data_for_reg_calcs, region): # Test region masks first row and second column of test data. Note that # first element of second row is np.nan in initial dataset. expected_data = [[np.nan, np.nan], [np.nan, np.nan], [3., np.nan], [4., np.nan]] expected = data_for_reg_calcs.copy(deep=True) expected.values = expected_data result = region.mask_var(data_for_reg_calcs) xr.testing.assert_identical(result, expected) @pytest.mark.parametrize( 'region', [region_no_land_mask, region_land_mask], ids=['no-land-mask', 'land-mask']) def test_mask_var_non_aospy_names(data_reg_alt_names, region): # Test region masks first row and second column of test data. Note that # first element of second row is np.nan in initial dataset. expected_data = [[np.nan, np.nan], [np.nan, np.nan], [3., np.nan], [4., np.nan]] expected = data_reg_alt_names.copy(deep=True) expected.values = expected_data result = region.mask_var(data_reg_alt_names, lon_str=_alt_names[LON_STR], lat_str=_alt_names[LAT_STR]) xr.testing.assert_identical(result, expected) def test_ts_no_land_mask(data_for_reg_calcs): result = region_no_land_mask.ts(data_for_reg_calcs) data = data_for_reg_calcs.values sfc_area = data_for_reg_calcs.sfc_area.values exp_numerator = data[2, 0] * sfc_area[2, 0] + data[3, 0] * sfc_area[3, 0] exp_denominator = sfc_area[2, 0] + sfc_area[3, 0] expected = xr.DataArray(exp_numerator / exp_denominator) xr.testing.assert_identical(result, expected) def test_ts_land_mask(data_for_reg_calcs): result = region_land_mask.ts(data_for_reg_calcs) expected = xr.DataArray(data_for_reg_calcs.values[3, 0]) xr.testing.assert_identical(result, expected) _map_to_alt_names = {'lon_str': _alt_names[LON_STR], 'lat_str': _alt_names[LAT_STR], 'land_mask_str': _alt_names[LAND_MASK_STR], 'sfc_area_str': _alt_names[SFC_AREA_STR]} def test_ts_non_aospy_names(data_reg_alt_names): result = region_land_mask.ts(data_reg_alt_names, **_map_to_alt_names) expected = xr.DataArray(data_reg_alt_names.values[3, 0]) xr.testing.assert_identical(result, expected)<|fim▁end|>
region = Region( name='test',
<|file_name|>add_sponsorship.py<|end_file_name|><|fim▁begin|>__author__ = 'tbri' from openerp import models, fields, api, _ class add_sponsorship_wizard(models.TransientModel): _name = 'add_sponsorship_wizard' def _get_all_children(self): c = [] children = self.env['res.partner'].search([('sponsored_child', '=', 'True')]) for n in children: child_ref = '%s %s' % (n.child_ident, n.name) c.append( (n.id, child_ref) ) return c #sponsor_id = fields.Many2one('sponsor') # see partner.py........... ## child_id = fields.Many2one('sponsored_child', domain=[('active','=',True)]) child_id = fields.Selection( _get_all_children , string=_('Child')) sub_sponsor = fields.Many2one('res.partner', _('Sub Sponsor'), domain=[('sub_sponsor','=',True)])<|fim▁hole|> @api.one def data_save(self): print "DATA_SAVE 1", self._context """ DATA_SAVAE! {'lang': 'en_US', 'search_disable_custom_filters': True, 'tz': False, 'uid': 1, 'active_model': 'sponsor', 'active_ids': [1], 'active_id': 1} """ model = self._context['active_model'] active_id = self._context['active_id'] assert model == 'res.partner' sponsor = self.env['res.partner'].browse(active_id) assert sponsor.sponsor print "DATA_SAVE 2", sponsor print "DATA_SAVE 3", self.child_id sponsorship = {'sponsor_id' : active_id, 'sponsored_child' : int(self.child_id), 'start_date' : self.start_date, 'end_date' : self.end_date, 'sub_sponsor' : self.sub_sponsor} print "CREATING SPONSORSHP" self.env['sponsorship'].create( sponsorship) return {'type': 'ir.actions.act_window_close'}<|fim▁end|>
start_date = fields.Date(_('Start date')) end_date = fields.Date(_('End date'))
<|file_name|>WaitForAuthorizationRes.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * The MIT License (MIT) * * Copyright (c) 2015 - 2019 Dr. Marc Mültin (V2G Clarity) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. *******************************************************************************/ package com.v2gclarity.risev2g.evcc.states; import java.util.concurrent.TimeUnit; import com.v2gclarity.risev2g.evcc.session.V2GCommunicationSessionEVCC; import com.v2gclarity.risev2g.shared.enumerations.GlobalValues; import com.v2gclarity.risev2g.shared.enumerations.V2GMessages; import com.v2gclarity.risev2g.shared.messageHandling.ReactionToIncomingMessage; import com.v2gclarity.risev2g.shared.messageHandling.TerminateSession; import com.v2gclarity.risev2g.shared.misc.TimeRestrictions; import com.v2gclarity.risev2g.shared.utils.SecurityUtils; import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.AuthorizationReqType; import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.AuthorizationResType; import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.ChargeParameterDiscoveryReqType; import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.EVSEProcessingType; import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.PaymentOptionType; import com.v2gclarity.risev2g.shared.v2gMessages.msgDef.V2GMessage; public class WaitForAuthorizationRes extends ClientState { public WaitForAuthorizationRes(V2GCommunicationSessionEVCC commSessionContext) { super(commSessionContext); } @Override public ReactionToIncomingMessage processIncomingMessage(Object message) { if (isIncomingMessageValid(message, AuthorizationResType.class)) {<|fim▁hole|> if (authorizationRes.getEVSEProcessing() == null) return new TerminateSession("EVSEProcessing parameter of AuthorizationRes is null. Parameter is mandatory."); if (authorizationRes.getEVSEProcessing().equals(EVSEProcessingType.FINISHED)) { getLogger().debug("EVSEProcessing was set to FINISHED"); getCommSessionContext().setOngoingTimer(0L); getCommSessionContext().setOngoingTimerActive(false); ChargeParameterDiscoveryReqType chargeParameterDiscoveryReq = getChargeParameterDiscoveryReq(); /* * Save this request in case the ChargeParameterDiscoveryRes indicates that the EVSE is * still processing. Then this request can just be resent instead of asking the EV again. */ getCommSessionContext().setChargeParameterDiscoveryReq(chargeParameterDiscoveryReq); return getSendMessage(chargeParameterDiscoveryReq, V2GMessages.CHARGE_PARAMETER_DISCOVERY_RES); } else { getLogger().debug("EVSEProcessing was set to ONGOING"); long elapsedTimeInMs = 0; if (getCommSessionContext().isOngoingTimerActive()) { long elapsedTime = System.nanoTime() - getCommSessionContext().getOngoingTimer(); elapsedTimeInMs = TimeUnit.MILLISECONDS.convert(elapsedTime, TimeUnit.NANOSECONDS); if (elapsedTimeInMs > TimeRestrictions.V2G_EVCC_ONGOING_TIMEOUT) return new TerminateSession("Ongoing timer timed out for AuthorizationReq"); } else { getCommSessionContext().setOngoingTimer(System.nanoTime()); getCommSessionContext().setOngoingTimerActive(true); } // [V2G2-684] demands to send an empty AuthorizationReq if the field EVSEProcessing is set to 'Ongoing' AuthorizationReqType authorizationReq = getAuthorizationReq(null); return getSendMessage(authorizationReq, V2GMessages.AUTHORIZATION_RES, Math.min((TimeRestrictions.V2G_EVCC_ONGOING_TIMEOUT - (int) elapsedTimeInMs), TimeRestrictions.getV2gEvccMsgTimeout(V2GMessages.AUTHORIZATION_RES))); } } else { return new TerminateSession("Incoming message raised an error"); } } }<|fim▁end|>
V2GMessage v2gMessageRes = (V2GMessage) message; AuthorizationResType authorizationRes = (AuthorizationResType) v2gMessageRes.getBody().getBodyElement().getValue();
<|file_name|>commands.cpp<|end_file_name|><|fim▁begin|>#include "commands.h" Commands* Commands::Instance () { static Commands t; return &t; } Commands::Commands() { settings = Settings::Instance(); critterselection = Critterselection::Instance(); registerCmd("quit", &Commands::quit); registerCmd("decreaseenergy", &Commands::decreaseenergy); registerCmd("increaseenergy", &Commands::increaseenergy); registerCmd("dec_foodmaxenergy", &Commands::decreasefoodmaxenergy); registerCmd("inc_foodmaxenergy", &Commands::increasefoodmaxenergy); registerCmd("dec_worldsizex", &Commands::dec_worldsizex); registerCmd("inc_worldsizex", &Commands::inc_worldsizex); registerCmd("dec_worldsizey", &Commands::dec_worldsizey); registerCmd("inc_worldsizey", &Commands::inc_worldsizey); registerCmd("dec_worldsizez", &Commands::dec_worldsizez); registerCmd("inc_worldsizez", &Commands::inc_worldsizez); registerCmd("loadallcritters", &WorldB::loadAllCritters); registerCmd("saveallcritters", &WorldB::saveAllCritters); registerCmd("insertcritter", &WorldB::insertCritter); registerCmd("killhalfofcritters", &WorldB::killHalfOfCritters); registerCmd("camera_resetposition", &WorldB::resetCamera); registerCmd("toggle_pause", &WorldB::togglePause); registerCmd("toggle_sleeper", &WorldB::toggleSleeper); registerCmd("toggle_mouselook", &WorldB::toggleMouselook); registerCmd("critter_select", &WorldB::selectBody); registerCmd("critter_deselect", &WorldB::deselectBody); registerCmd("critter_pick", &WorldB::pickBody); registerCmd("critter_unpick", &WorldB::unpickBody); registerCmd("camera_moveup", &Commands::camera_moveup); registerCmd("camera_movedown", &Commands::camera_movedown); registerCmd("camera_moveforward", &Commands::camera_moveforward); registerCmd("camera_movebackward", &Commands::camera_movebackward); registerCmd("camera_moveleft", &Commands::camera_moveleft); registerCmd("camera_moveright", &Commands::camera_moveright); registerCmd("camera_lookup", &Commands::camera_lookup); registerCmd("camera_lookdown", &Commands::camera_lookdown); registerCmd("camera_lookleft", &Commands::camera_lookleft); registerCmd("camera_lookright", &Commands::camera_lookright); registerCmd("camera_rollleft", &Commands::camera_rollleft); registerCmd("camera_rollright", &Commands::camera_rollright); registerCmd("camera_lookhorizontal", &Commands::camera_lookhorizontal); registerCmd("camera_lookvertical", &Commands::camera_lookvertical); registerCmd("camera_movehorizontal", &Commands::camera_movehorizontal); registerCmd("camera_movevertical", &Commands::camera_movevertical); registerCmd("gui_togglepanel", &Maincanvas::swapChild); registerCmd("gui_toggle", &Maincanvas::swap); registerCmd("settings_saveprofile", &Settings::saveProfile); registerCmd("settings_increase", &Settings::increaseCVar); registerCmd("settings_decrease", &Settings::decreaseCVar); registerCmd("cs_unregister", &Critterselection::unregisterCritterVID); // registerCmd("cs_select", &Critterselection::selectCritterVID); registerCmd("cs_select", &Commands::selectCritter); registerCmd("cs_selectall", &Commands::selectCritterAll); registerCmd("cs_clear", &Critterselection::clear); registerCmd("cs_kill", &WorldB::removeSelectedCritter); registerCmd("cs_killall", &WorldB::removeAllSelectedCritters); registerCmd("cs_duplicate", &WorldB::duplicateSelectedCritter); registerCmd("cs_spawnbrainmutant", &WorldB::spawnBrainMutantSelectedCritter); registerCmd("cs_spawnbodymutant", &WorldB::spawnBodyMutantSelectedCritter); registerCmd("cs_spawnbrainbodymutant", &WorldB::spawnBrainBodyMutantSelectedCritter); registerCmd("cs_duplicateall", &WorldB::duplicateAllSelectedCritters); registerCmd("cs_spawnbrainmutantall", &WorldB::spawnBrainMutantAllSelectedCritters); registerCmd("cs_spawnbodymutantall", &WorldB::spawnBodyMutantAllSelectedCritters); registerCmd("cs_spawnbrainbodymutantall", &WorldB::spawnBrainBodyMutantAllSelectedCritters); registerCmd("cs_feed", &WorldB::feedSelectedCritter); registerCmd("cs_resetage", &WorldB::resetageSelectedCritter); } void Commands::registerCmd(string name, void (Commands::*pt2Func)()) { cmd* c = new cmd(); c->commandtype = T_COMMAND; c->argtype = A_NOARG; c->commandsMember = pt2Func; cmdlist[name] = c; } void Commands::registerCmd(string name, void (Commands::*pt2Func)(const unsigned int&)) { cmd* c = new cmd(); c->commandtype = T_COMMAND; c->argtype = A_UINT; c->commandsMember_uint = pt2Func; cmdlist[name] = c; } void Commands::registerCmd(string name, void (WorldB::*pt2Func)()) { cmd* c = new cmd();<|fim▁hole|> cmdlist[name] = c; } void Commands::registerCmd(string name, void (Maincanvas::*pt2Func)()) { cmd* c = new cmd(); c->commandtype = T_CANVAS; c->argtype = A_NOARG; c->canvasMember = pt2Func; cmdlist[name] = c; } void Commands::registerCmd(string name, void (Maincanvas::*pt2Func)(const string&)) { cmd* c = new cmd(); c->commandtype = T_CANVAS; c->argtype = A_STRING; c->canvasMember_string = pt2Func; cmdlist[name] = c; } void Commands::registerCmd(string name, void (Settings::*pt2Func)()) { cmd* c = new cmd(); c->commandtype = T_SETTINGS; c->argtype = A_NOARG; c->settingsMember = pt2Func; cmdlist[name] = c; } void Commands::registerCmd(string name, void (Settings::*pt2Func)(const string&)) { cmd* c = new cmd(); c->commandtype = T_SETTINGS; c->argtype = A_STRING; c->settingsMember_string= pt2Func; cmdlist[name] = c; } void Commands::registerCmd(string name, void (Critterselection::*pt2Func)()) { cmd* c = new cmd(); c->commandtype = T_CS; c->argtype = A_NOARG; c->critterselectionMember = pt2Func; cmdlist[name] = c; } void Commands::registerCmd(string name, void (Critterselection::*pt2Func)(const unsigned int&)) { cmd* c = new cmd(); c->commandtype = T_CS; c->argtype = A_UINT; c->critterselectionMember_uint= pt2Func; cmdlist[name] = c; } // fixme private void Commands::execCmd(const string& name) { if ( cmdlist[name]->commandtype == T_COMMAND ) (this->*cmdlist[name]->commandsMember)(); else if ( cmdlist[name]->commandtype == T_WORLD ) (world->*cmdlist[name]->worldMember)(); else if ( cmdlist[name]->commandtype == T_CS ) (critterselection->*cmdlist[name]->critterselectionMember)(); else if ( cmdlist[name]->commandtype == T_CANVAS ) (canvas->*cmdlist[name]->canvasMember)(); else if ( cmdlist[name]->commandtype == T_SETTINGS ) (settings->*cmdlist[name]->settingsMember)(); } void Commands::execCmd(const string& name, const string& str) { if ( cmdlist[name]->commandtype == T_CANVAS ) (canvas->*cmdlist[name]->canvasMember_string)(str); else if ( cmdlist[name]->commandtype == T_SETTINGS ) (settings->*cmdlist[name]->settingsMember_string)(str); } void Commands::execCmd(const string& name, const unsigned int& ui) { if ( cmdlist[name]->commandtype == T_CS ) (critterselection->*cmdlist[name]->critterselectionMember_uint)(ui); else if ( cmdlist[name]->commandtype == T_COMMAND ) (this->*cmdlist[name]->commandsMember_uint)(ui); } // fixme public void Commands::execCmd(const cmdsettings& cmds) { // first check if called function exists if ( cmdlist[cmds.name] ) { // check if expected types match if ( cmdlist[cmds.name]->argtype == cmds.argtype ) { if ( cmds.argtype == A_NOARG ) execCmd(cmds.name); else if ( cmds.argtype == A_STRING ) execCmd(cmds.name, cmds.args); else if ( cmds.argtype == A_UINT ) execCmd(cmds.name, cmds.argui); } else cerr << "command '" << cmds.name << "'s args do not match: got " << cmds.argtype << " but expected " << cmdlist[cmds.name]->argtype << endl; } // else // cerr << "command '" << cmds.name << "' does not exist" << endl; } void Commands::quit() { SDL_Quit(); exit(0); } void Commands::selectCritterAll() { critterselection->clear(); for ( unsigned int i=0; i < world->critters.size(); i++ ) critterselection->registerCritter(world->critters[i]); } void Commands::selectCritter(const unsigned int& c) { canvas->swapChild("critterview"); critterselection->selectCritterVID(c); } void Commands::decreaseenergy() { if ( ( (int)settings->getCVar("energy") - 1 ) >= 0 ) { settings->setCVar("energy", settings->getCVar("energy")-1 ); world->freeEnergy -= settings->getCVar("food_maxenergy"); stringstream buf; buf << "energy: " << settings->getCVar("energy"); Logbuffer::Instance()->add(buf); } } void Commands::increaseenergy() { settings->setCVar("energy", settings->getCVar("energy")+1 ); world->freeEnergy += settings->getCVar("food_maxenergy"); stringstream buf; buf << "energy: " << settings->getCVar("energy"); Logbuffer::Instance()->add(buf); } void Commands::decreasefoodmaxenergy() { if ( ( (int)settings->getCVar("food_maxenergy") - 1 ) >= 0 ) { world->freeEnergy -= settings->getCVar("energy"); settings->setCVar("food_maxenergy", settings->getCVar("food_maxenergy")-1 ); } } void Commands::increasefoodmaxenergy() { world->freeEnergy += settings->getCVar("energy"); settings->setCVar("food_maxenergy", settings->getCVar("food_maxenergy")+1 ); } void Commands::dec_worldsizex() { settings->decreaseCVar("worldsizeX"); world->makeFloor(); } void Commands::inc_worldsizex() { settings->increaseCVar("worldsizeX"); world->makeFloor(); } void Commands::dec_worldsizey() { settings->decreaseCVar("worldsizeY"); world->makeFloor(); } void Commands::inc_worldsizey() { settings->increaseCVar("worldsizeY"); world->makeFloor(); } void Commands::dec_worldsizez() { settings->decreaseCVar("worldsizeZ"); world->makeFloor(); } void Commands::inc_worldsizez() { settings->increaseCVar("worldsizeZ"); world->makeFloor(); } // camera ops void Commands::camera_moveup() { world->camera.moveUp(0.01f); world->movePickedBodyFrom(); } void Commands::camera_movedown() { world->camera.moveDown(0.01f); world->movePickedBodyFrom(); } void Commands::camera_moveforward() { world->camera.moveForward(0.01f); world->movePickedBodyFrom(); } void Commands::camera_movebackward() { world->camera.moveBackward(0.01f); world->movePickedBodyFrom(); } void Commands::camera_moveleft() { world->camera.moveLeft(0.01f); world->movePickedBodyFrom(); } void Commands::camera_moveright() { world->camera.moveRight(0.01f); world->movePickedBodyFrom(); } void Commands::camera_lookup() { world->camera.lookUp(0.001f); world->calcMouseDirection(); world->movePickedBodyTo(); } void Commands::camera_lookdown() { world->camera.lookDown(0.001f); world->calcMouseDirection(); world->movePickedBodyTo(); } void Commands::camera_lookleft() { world->camera.lookLeft(0.001f); world->calcMouseDirection(); world->movePickedBodyTo(); } void Commands::camera_lookright() { world->camera.lookRight(0.001f); world->calcMouseDirection(); world->movePickedBodyTo(); } void Commands::camera_rollleft() { world->camera.rollLeft(0.001f); world->calcMouseDirection(); world->movePickedBodyTo(); } void Commands::camera_rollright() { world->camera.rollRight(0.001f); world->calcMouseDirection(); world->movePickedBodyTo(); } void Commands::camera_lookhorizontal() { world->camera.lookRight((float)world->relx/3000); world->calcMouseDirection(); world->movePickedBodyTo(); } void Commands::camera_lookvertical() { world->camera.lookDown((float)world->rely/3000); world->calcMouseDirection(); world->movePickedBodyTo(); } void Commands::camera_movehorizontal() { world->camera.moveRight((float)world->relx/300); world->movePickedBodyFrom(); } void Commands::camera_movevertical() { world->camera.moveDown((float)world->rely/300); world->movePickedBodyFrom(); } Commands::~Commands() { for( cmdit = cmdlist.begin(); cmdit != cmdlist.end(); cmdit++ ) delete cmdit->second; }<|fim▁end|>
c->commandtype = T_WORLD; c->argtype = A_NOARG; c->worldMember = pt2Func;
<|file_name|>persistent.py<|end_file_name|><|fim▁begin|># 2017 Red Hat Inc. # (c) 2017 Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION = """ author: Ansible Core Team connection: persistent short_description: Use a persistent unix socket for connection description: - This is a helper plugin to allow making other connections persistent. version_added: "2.3" options: persistent_command_timeout: type: int description: - Configures, in seconds, the amount of time to wait for a command to return from the remote device. If this timer is exceeded before the command returns, the connection plugin will raise an exception and close default: 10 ini: - section: persistent_connection key: command_timeout env: - name: ANSIBLE_PERSISTENT_COMMAND_TIMEOUT vars: - name: ansible_command_timeout """ import os import pty import json import subprocess import sys from ansible import constants as C from ansible.plugins.connection import ConnectionBase from ansible.module_utils._text import to_text from ansible.module_utils.six.moves import cPickle from ansible.module_utils.connection import Connection as SocketConnection from ansible.errors import AnsibleError try: from __main__ import display except ImportError: from ansible.utils.display import Display display = Display() class Connection(ConnectionBase): ''' Local based connections ''' transport = 'persistent' has_pipelining = False def _connect(self): self._connected = True return self def exec_command(self, cmd, in_data=None, sudoable=True): display.vvvv('exec_command(), socket_path=%s' % self.socket_path, host=self._play_context.remote_addr) connection = SocketConnection(self.socket_path) out = connection.exec_command(cmd, in_data=in_data, sudoable=sudoable) return 0, out, '' def put_file(self, in_path, out_path): pass def fetch_file(self, in_path, out_path): pass def close(self): self._connected = False def run(self): """Returns the path of the persistent connection socket. Attempts to ensure (within playcontext.timeout seconds) that the socket path exists. If the path exists (or the timeout has expired), returns the socket path. """ display.vvvv('starting connection from persistent connection plugin', host=self._play_context.remote_addr) socket_path = self._start_connection() display.vvvv('local domain socket path is %s' % socket_path, host=self._play_context.remote_addr) setattr(self, '_socket_path', socket_path) return socket_path def _start_connection(self): ''' Starts the persistent connection ''' master, slave = pty.openpty() python = sys.executable def find_file_in_path(filename): # Check $PATH first, followed by same directory as sys.argv[0] paths = os.environ['PATH'].split(os.pathsep) + [os.path.dirname(sys.argv[0])] for dirname in paths: fullpath = os.path.join(dirname, filename) if os.path.isfile(fullpath): return fullpath raise AnsibleError("Unable to find location of '%s'" % filename)<|fim▁hole|> p = subprocess.Popen( [python, find_file_in_path('ansible-connection'), to_text(os.getppid())], stdin=slave, stdout=subprocess.PIPE, stderr=subprocess.PIPE ) stdin = os.fdopen(master, 'wb', 0) os.close(slave) # Need to force a protocol that is compatible with both py2 and py3. # That would be protocol=2 or less. # Also need to force a protocol that excludes certain control chars as # stdin in this case is a pty and control chars will cause problems. # that means only protocol=0 will work. src = cPickle.dumps(self._play_context.serialize(), protocol=0) stdin.write(src) stdin.write(b'\n#END_INIT#\n') src = cPickle.dumps({'ansible_command_timeout': self.get_option('persistent_command_timeout')}, protocol=0) stdin.write(src) stdin.write(b'\n#END_VARS#\n') stdin.flush() (stdout, stderr) = p.communicate() stdin.close() if p.returncode == 0: result = json.loads(to_text(stdout, errors='surrogate_then_replace')) else: try: result = json.loads(to_text(stderr, errors='surrogate_then_replace')) except getattr(json.decoder, 'JSONDecodeError', ValueError): # JSONDecodeError only available on Python 3.5+ result = {'error': to_text(stderr, errors='surrogate_then_replace')} if 'messages' in result: for msg in result.get('messages'): display.vvvv('%s' % msg, host=self._play_context.remote_addr) if 'error' in result: if self._play_context.verbosity > 2: if result.get('exception'): msg = "The full traceback is:\n" + result['exception'] display.display(msg, color=C.COLOR_ERROR) raise AnsibleError(result['error']) return result['socket_path']<|fim▁end|>
<|file_name|>CheckBox.py<|end_file_name|><|fim▁begin|># Copyright 2006 James Tauber and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at<|fim▁hole|># Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pyjamas import DOM from pyjamas.ui.ButtonBase import ButtonBase from pyjamas.ui import Event _CheckBox_unique_id=0; class CheckBox(ButtonBase): def __init__(self, label=None, asHTML=False, **kwargs): if not kwargs.has_key('StyleName'): kwargs['StyleName']="gwt-CheckBox" if label: if asHTML: kwargs['HTML'] = label else: kwargs['Text'] = label self.initElement(DOM.createInputCheck(), **kwargs) def initElement(self, element, **kwargs): self.inputElem = element self.labelElem = DOM.createLabel() ButtonBase.__init__(self, DOM.createSpan(), **kwargs) self.unsinkEvents(Event.FOCUSEVENTS| Event.ONCLICK) DOM.sinkEvents(self.inputElem, Event.FOCUSEVENTS | Event.ONCLICK | DOM.getEventsSunk(self.inputElem)) DOM.appendChild(self.getElement(), self.inputElem) DOM.appendChild(self.getElement(), self.labelElem) uid = "check%d" % self.getUniqueID() DOM.setAttribute(self.inputElem, "id", uid) DOM.setAttribute(self.labelElem, "htmlFor", uid) # emulate static def getUniqueID(self): global _CheckBox_unique_id _CheckBox_unique_id += 1 return _CheckBox_unique_id; def getHTML(self): return DOM.getInnerHTML(self.labelElem) def getName(self): return DOM.getAttribute(self.inputElem, "name") def getText(self): return DOM.getInnerText(self.labelElem) def setChecked(self, checked): DOM.setBooleanAttribute(self.inputElem, "checked", checked) DOM.setBooleanAttribute(self.inputElem, "defaultChecked", checked) def isChecked(self): if self.isAttached(): propName = "checked" else: propName = "defaultChecked" return DOM.getBooleanAttribute(self.inputElem, propName) def isEnabled(self): return not DOM.getBooleanAttribute(self.inputElem, "disabled") def setEnabled(self, enabled): DOM.setBooleanAttribute(self.inputElem, "disabled", not enabled) def setFocus(focused): if focused: Focus.focus(self.inputElem) else: Focus.blur(self.inputElem) def setHTML(self, html): DOM.setInnerHTML(self.labelElem, html) def setName(self, name): DOM.setAttribute(self.inputElem, "name", name) def setTabIndex(self, index): Focus.setTabIndex(self.inputElem, index) def setText(self, text): DOM.setInnerText(self.labelElem, text) def onDetach(self): self.setChecked(self.isChecked()) ButtonBase.onDetach(self)<|fim▁end|>
# # http://www.apache.org/licenses/LICENSE-2.0 #
<|file_name|>interpreter.rs<|end_file_name|><|fim▁begin|>use std::io::{Read, Write}; use state::State; use common::BfResult; use traits::Interpretable; use super::*; impl Interpretable for Program { fn interpret_state<R: Read, W: Write>( &self, mut state: State, mut input: R, mut output: W) -> BfResult<()> { interpret(self, &mut state, &mut input, &mut output) } } fn interpret<R, W>(instructions: &Program, state: &mut State, input: &mut R, output: &mut W) -> BfResult<()> where R: Read, W: Write { use common::Instruction::*; let mut pc = 0; while pc < instructions.len() { match instructions[pc] { Left(count) => state.left(count)?, Right(count) => state.right(count)?, Add(count) => state.up(count), In => state.read(input), Out => state.write(output), JumpZero(address) => { if state.load() == 0 { pc = address as usize; } } JumpNotZero(address) => { if state.load() != 0 { pc = address as usize; } } SetZero => state.store(0),<|fim▁hole|> if state.load() != 0 { let value = state.load(); state.store(0); state.up_pos_offset(offset, value)?; } } OffsetAddLeft(offset) => { if state.load() != 0 { let value = state.load(); state.store(0); state.up_neg_offset(offset, value)?; } } FindZeroRight(offset) => { while state.load() != 0 { state.right(offset)?; } } FindZeroLeft(offset) => { while state.load() != 0 { state.left(offset)?; } } } pc += 1; } Ok(()) } #[cfg(test)] mod tests { use test_helpers::*; #[test] fn hello_world() { assert_parse_interpret(HELLO_WORLD_SRC, "", "Hello, World!"); } #[test] fn factoring() { assert_parse_interpret(FACTOR_SRC, "2\n", "2: 2\n"); assert_parse_interpret(FACTOR_SRC, "3\n", "3: 3\n"); assert_parse_interpret(FACTOR_SRC, "6\n", "6: 2 3\n"); assert_parse_interpret(FACTOR_SRC, "100\n", "100: 2 2 5 5\n"); } fn assert_parse_interpret(program: &[u8], input: &str, output: &str) { let program = ::ast::parse_program(program).unwrap(); let program = ::rle::compile(&program); let program = ::peephole::compile(&program); let program = ::bytecode::compile(&program); assert_interpret(&*program, input.as_bytes(), output.as_bytes()); } }<|fim▁end|>
OffsetAddRight(offset) => {
<|file_name|>_sql_virtual_machine_management_client.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy from typing import Any, Awaitable, Optional, TYPE_CHECKING from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer from .. import models from ._configuration import SqlVirtualMachineManagementClientConfiguration from .operations import AvailabilityGroupListenersOperations, Operations, SqlVirtualMachineGroupsOperations, SqlVirtualMachinesOperations if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential class SqlVirtualMachineManagementClient: """The SQL virtual machine management API provides a RESTful set of web APIs that interact with Azure Compute, Network & Storage services to manage your SQL Server virtual machine. The API enables users to create, delete and retrieve a SQL virtual machine, SQL virtual machine group or availability group listener. :ivar availability_group_listeners: AvailabilityGroupListenersOperations operations :vartype availability_group_listeners: azure.mgmt.sqlvirtualmachine.aio.operations.AvailabilityGroupListenersOperations :ivar operations: Operations operations :vartype operations: azure.mgmt.sqlvirtualmachine.aio.operations.Operations :ivar sql_virtual_machine_groups: SqlVirtualMachineGroupsOperations operations :vartype sql_virtual_machine_groups: azure.mgmt.sqlvirtualmachine.aio.operations.SqlVirtualMachineGroupsOperations :ivar sql_virtual_machines: SqlVirtualMachinesOperations operations :vartype sql_virtual_machines: azure.mgmt.sqlvirtualmachine.aio.operations.SqlVirtualMachinesOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: Subscription ID that identifies an Azure subscription. :type subscription_id: str :param base_url: Service URL. Default value is 'https://management.azure.com'. :type base_url: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: self._config = SqlVirtualMachineManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False self.availability_group_listeners = AvailabilityGroupListenersOperations(self._client, self._config, self._serialize, self._deserialize) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.sql_virtual_machine_groups = SqlVirtualMachineGroupsOperations(self._client, self._config, self._serialize, self._deserialize) self.sql_virtual_machines = SqlVirtualMachinesOperations(self._client, self._config, self._serialize, self._deserialize) def _send_request( self, request: HttpRequest,<|fim▁hole|> """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") <HttpRequest [GET], url: 'https://www.example.org/'> >>> response = await client._send_request(request) <AsyncHttpResponse: 200 OK> For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest :keyword bool stream: Whether the response payload will be streamed. Defaults to False. :return: The response of your network call. Does not do error handling on your response. :rtype: ~azure.core.rest.AsyncHttpResponse """ request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) return self._client.send_request(request_copy, **kwargs) async def close(self) -> None: await self._client.close() async def __aenter__(self) -> "SqlVirtualMachineManagementClient": await self._client.__aenter__() return self async def __aexit__(self, *exc_details) -> None: await self._client.__aexit__(*exc_details)<|fim▁end|>
**kwargs: Any ) -> Awaitable[AsyncHttpResponse]:
<|file_name|>science_verification.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # ========================================================================== # This script performs the ctools science verification. It creates and # analyses the pull distributions for a variety of spectral and spatial # models. Test are generally done in unbinned mode, but also a stacked # analysis test is included. At the end the script produces a JUnit # compliant science verification report. # # Usage: # ./science_verification.py # # -------------------------------------------------------------------------- # # Copyright (C) 2015-2021 Juergen Knoedlseder # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # ========================================================================== import os import csv import math import sys import gammalib #import ctools import cscripts # ========================== # # Generate pull distribution # # ========================== # def generate_pull_distribution(model, obs='NONE', onsrc='NONE', onrad=0.2, \ trials=100, caldb='prod2', irf='South_50h', \ deadc=0.98, edisp=False, \ ra=83.63, dec=22.01, rad=5.0, \ emin=0.1, emax=100.0, enumbins=0, \ duration=1800.0, \ npix=200, binsz=0.02, \ coordsys='CEL', proj='TAN', \ debug=False, chatter=2): """ Generates pull distribution for a given model Parameters ---------- model : str Model XML filename (without .xml extension) obs : str, optional Input observation definition XML filename onsrc : str, optional Name of On source for On/Off analysis onrad : float, optional Radius of On region trials : int, optional Number of trials caldb : str, optional Calibration database irf : str, optional Name of instrument response function deadc : float, optional Deadtime correction factor edisp : bool, optional Use energy dispersion? ra : float, optional Right Ascension of pointing (deg) dec : float, optional Declination of pointing (deg) rad : float, optional Simulation radius (deg) emin : float, optional Minimum energy (TeV) emax : float, optional Maximum energy (TeV) enumbins : int, optional Number of energy bins (0 for unbinned analysis) duration : float, optional Observation duration (sec) npix : int, optional Number of pixels binsz : float, optional Pixel size (deg/pixel) coordsys : str, optional Coordinate system (CEL or GAL) proj : str, optional Sky projection debug : bool, optional Enable debugging? chatter : int, optional Chatter level Returns ------- outfile : str Name of pull distribution output file """ # Derive parameters _, tail = os.path.split(model) inmodel = model + '.xml' outfile = 'cspull_' + tail + '.fits' # Setup pull distribution generation pull = cscripts.cspull() pull['inobs'] = obs pull['inmodel'] = inmodel pull['onsrc'] = onsrc pull['onrad'] = onrad pull['outfile'] = outfile pull['caldb'] = caldb pull['irf'] = irf pull['edisp'] = edisp pull['ra'] = ra pull['dec'] = dec pull['rad'] = rad pull['emin'] = emin pull['emax'] = emax pull['tmin'] = 0.0 pull['tmax'] = duration pull['enumbins'] = enumbins pull['npix'] = npix pull['binsz'] = binsz pull['coordsys'] = coordsys pull['proj'] = proj pull['deadc'] = deadc pull['rad'] = rad pull['ntrials'] = trials pull['debug'] = debug pull['chatter'] = chatter # Generate pull distributions pull.execute() # Return return outfile # ========================= # # Analyse pull distribution # # ========================= # def analyse_pull_distribution(filename): """ Compute mean and standard deviation of pull distribution Parameters ---------- filename : str Pull distribution ASCII file to analyse Returns ------- results : dict Result dictionary """ # Initialise column names, means and standard deviations colnames = [] means = [] stds = [] # Open FITS file fits = gammalib.GFits(filename) # Get pull distribution table table = fits.table('PULL_DISTRIBUTION') nrows = table.nrows() ncolumns = table.ncols() # Loop over columns for i in range(ncolumns): # Get table column column = table[i] # Get column names and initialise mean and standard deviations colnames.append(column.name()) # Compute means and standard deciation mean = 0.0 std = 0.0 samples = 0.0 for row in range(nrows): mean += float(column[row]) std += float(column[row])*float(column[row]) samples += 1.0 std = math.sqrt(std/samples - mean*mean/(samples*samples)) mean /= samples # Store mean and standard deviations means.append(mean) stds.append(std) # Setup results results = {} for i in range(len(colnames)): results[colnames[i]] = {'mean': means[i], 'std': stds[i]} # Return results return results # =================================== # # Test class for science verification # # =================================== # class sciver(gammalib.GPythonTestSuite): """ Test class for science verification """ # Constructor def __init__(self): """ Constructor """ # Call base class constructor gammalib.GPythonTestSuite.__init__(self) # Initialise results self.results = None # Return return # Set test functions def set(self): """ Set all test functions """ # Set test name self.name('Science Verification') # Append background model test self.append(self.bgd, 'Test background model') # Append spectral tests self.append(self.spec_plaw, 'Test power law model') self.append(self.spec_plaw_edisp, 'Test power law model with energy dispersion') self.append(self.spec_plaw_stacked, 'Test power law model with stacked analysis') self.append(self.spec_plaw_onoff, 'Test power law model with On/Off analysis') self.append(self.spec_plaw2, 'Test power law 2 model') self.append(self.spec_smoothbplaw, 'Test smoothly broken power law model') self.append(self.spec_eplaw, 'Test exponentially cut off power law model') self.append(self.spec_supeplaw, 'Test super exponentially cut off power law model') self.append(self.spec_logparabola, 'Test log parabola model') self.append(self.spec_gauss, 'Test Gaussian model') self.append(self.spec_filefct, 'Test file function model') self.append(self.spec_nodes, 'Test nodes model') self.append(self.spec_table, 'Test table model') self.append(self.spec_exponential, 'Test exponential model') # Append spatial tests self.append(self.spat_ptsrc, 'Test point source model') self.append(self.spat_rdisk, 'Test radial disk model') self.append(self.spat_rring, 'Test radial ring model') self.append(self.spat_rgauss, 'Test radial Gaussian model') self.append(self.spat_rshell, 'Test radial shell model') self.append(self.spat_edisk, 'Test elliptical disk model') self.append(self.spat_egauss, 'Test elliptical Gaussian model')<|fim▁hole|> self.append(self.spat_cube, 'Test diffuse cube model') # Return return # Generate and analyse pull distributions def pull(self, model, obs='NONE', onsrc='NONE', trials=100, duration=1800.0, ra=83.63, dec=22.01, rad=5.0, emin=0.1, emax=100.0, enumbins=0, edisp=False, debug=False): """ Generate and analyse pull distributions Parameters ---------- model : str Model XML filename (without .xml extension) obs : str, optional Input observation definition XML filename onsrc : str, optional Name of On source for On/Off analysis trials : int, optional Number of trials duration : float, optional Observation duration (sec) ra : float, optional Right Ascension of pointing (deg) dec : float, optional Declination of pointing (deg) rad : float, optional Simulation radius (deg) emin : float, optional Minimum energy (TeV) emax : float, optional Maximum energy (TeV) enumbins : int, optional Number of energy bins (0 for unbinned analysis) edisp : bool, optional Use energy dispersion? debug : bool, optional Enable debugging? """ # Generate pull distribution outfile = generate_pull_distribution(model, obs=obs, onsrc=onsrc, trials=trials, duration=duration, ra=ra, dec=dec, rad=rad, emin=emin, emax=emax, enumbins=enumbins, edisp=edisp, debug=debug) # Analyse pull distribution self.results = analyse_pull_distribution(outfile) # Return return # Test parameter result def test(self, name, lim_mean=0.4, lim_std=0.2): """ Test one parameter Parameters ---------- name : str Parameter name lim_mean : float, optional Limit for mean value lim_std : float, optional Limit for standard deviation """ # Set minima and maximum mean_min = -lim_mean mean_max = +lim_mean std_min = 1.0-lim_std std_max = 1.0+lim_std # Test mean mean = self.results[name]['mean'] valid = (mean >= mean_min) and (mean <= mean_max) text = 'Mean %.5f of %s should be within [%.2f,%.2f] range' % \ (mean, name, mean_min, mean_max) self.test_assert(valid, text) # Test standard deviation std = self.results[name]['std'] valid = (std >= std_min) and (std <= std_max) text = 'Standard deviation %.5f of %s should be within [%.2f,%.2f]' \ ' range' % (std, name, std_min, std_max) self.test_assert(valid, text) # Return return # Test background model def bgd(self): """ Test background model The standard background model is tested for an observation duration of 50 hours to verify the numerical accuracy of the background model at sufficiently good precision. Most analysis relies on the numerical accuracy of the background model, hence it's important to assure that the model is indeed accurate. """ self.pull('data/sciver/bgd', duration=180000.0) self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test power law model def spec_plaw(self): """ Test power law model """ self.pull('data/sciver/crab_plaw') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test power law model with energy dispersion def spec_plaw_edisp(self): """ Test power law model with energy dispersion """ self.pull('data/sciver/crab_plaw_edisp', edisp=True) self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test power law model with stacked analysis def spec_plaw_stacked(self): """ Test power law model with stacked analysis """ self.pull('data/sciver/crab_plaw_stacked', obs='data/sciver/obs_stacked.xml', emin=0.020, emax=100.0, enumbins=40) self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUNDMODEL_PREFACTOR_PULL') self.test('BACKGROUNDMODEL_INDEX_PULL') return # Test power law model with On/Off analysis def spec_plaw_onoff(self): """ Test power law model with On/Off analysis """ self.pull('data/sciver/crab_plaw_onoff', obs='data/sciver/obs_onoff.xml', onsrc='Crab', emin=0.1, emax=100.0, enumbins=20) self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test power law 2 model def spec_plaw2(self): """ Test power law 2 model """ self.pull('data/sciver/crab_plaw2') self.test('CRAB_PHOTONFLUX_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test smoothly broken power law model def spec_smoothbplaw(self): """ Test smoothly broken power law model """ self.pull('data/sciver/crab_smoothbplaw') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX1_PULL') self.test('CRAB_INDEX2_PULL') self.test('CRAB_BREAKENERGY_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test exponentially cut off power law model def spec_eplaw(self): """ Test exponentially cut off power law model """ self.pull('data/sciver/crab_eplaw') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_CUTOFFENERGY_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test super exponentially cut off power law model def spec_supeplaw(self): """ Test super exponentially cut off power law model """ self.pull('data/sciver/crab_supeplaw') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX1_PULL') self.test('CRAB_INDEX2_PULL') self.test('CRAB_CUTOFFENERGY_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test log parabola model def spec_logparabola(self): """ Test log parabola model """ self.pull('data/sciver/crab_logparabola') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_CURVATURE_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test Gaussian model def spec_gauss(self): """ Test Gaussian model """ self.pull('data/sciver/crab_gauss') self.test('CRAB_NORMALIZATION_PULL') self.test('CRAB_MEAN_PULL') self.test('CRAB_SIGMA_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test file function model def spec_filefct(self): """ Test file function model """ self.pull('data/sciver/crab_filefct') self.test('CRAB_NORMALIZATION_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test nodes model def spec_nodes(self): """ Test nodes model """ self.pull('data/sciver/crab_nodes') self.test('CRAB_INTENSITY0_PULL') self.test('CRAB_INTENSITY1_PULL') self.test('CRAB_INTENSITY2_PULL') self.test('CRAB_INTENSITY3_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test table model def spec_table(self): """ Test table model """ self.pull('data/sciver/crab_table') self.test('CRAB_NORMALIZATION_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_CUTOFF_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test exponential model def spec_exponential(self): """ Test exponential model """ self.pull('data/sciver/crab_exponential') self.test('CRAB_1:PREFACTOR_PULL') self.test('CRAB_1:INDEX_PULL') self.test('CRAB_2:NORMALIZATION_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test point source model def spat_ptsrc(self): """ Test point source model """ self.pull('data/sciver/crab_ptsrc') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_RA_PULL') self.test('CRAB_DEC_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test radial disk model def spat_rdisk(self): """ Test radial disk model """ self.pull('data/sciver/crab_rdisk') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_RA_PULL') self.test('CRAB_DEC_PULL') self.test('CRAB_RADIUS_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test radial ring model def spat_rring(self): """ Test radial ring model """ self.pull('data/sciver/crab_rring') self.test('CRAB_PREFACTOR_PULL', lim_mean=0.45) # Accept a small bias self.test('CRAB_INDEX_PULL') self.test('CRAB_RA_PULL') self.test('CRAB_DEC_PULL') self.test('CRAB_RADIUS_PULL') self.test('CRAB_WIDTH_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test radial Gaussian model def spat_rgauss(self): """ Test radial Gaussian model """ self.pull('data/sciver/crab_rgauss') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_RA_PULL') self.test('CRAB_DEC_PULL') self.test('CRAB_SIGMA_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test radial shell model def spat_rshell(self): """ Test radial shell model """ self.pull('data/sciver/crab_rshell') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_RA_PULL') self.test('CRAB_DEC_PULL') self.test('CRAB_RADIUS_PULL') self.test('CRAB_WIDTH_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test elliptical disk model def spat_edisk(self): """ Test elliptical disk model """ self.pull('data/sciver/crab_edisk') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_RA_PULL') self.test('CRAB_DEC_PULL') self.test('CRAB_PA_PULL') self.test('CRAB_MINORRADIUS_PULL') self.test('CRAB_MAJORRADIUS_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test elliptical Gaussian model def spat_egauss(self): """ Test elliptical Gaussian model """ self.pull('data/sciver/crab_egauss') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('CRAB_RA_PULL') self.test('CRAB_DEC_PULL') self.test('CRAB_PA_PULL') self.test('CRAB_MINORRADIUS_PULL') self.test('CRAB_MAJORRADIUS_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test diffuse isotropic model def spat_const(self): """ Test diffuse isotropic model """ self.pull('data/sciver/crab_const') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test diffuse map model def spat_map(self): """ Test diffuse map model """ self.pull('data/sciver/crab_map', ra=201.3651, dec=-43.0191) self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test diffuse map model (small ROI) def spat_map_roi(self): """ Test diffuse map model (small ROI) Note that the prefactor seems here a bit biased, which could relate to a possible uncertainty in the flux evaluation. This needs to be investigated further. """ self.pull('data/sciver/crab_map_roi', ra=201.3651, dec=-43.0191, rad=1.5) self.test('CRAB_PREFACTOR_PULL', lim_mean=0.45) # Accept a small bias self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test diffuse map model (not normalized and scaled) def spat_map_nn(self): """ Test diffuse map model (not normalized and scaled) """ self.pull('data/sciver/crab_map_nn', ra=201.3651, dec=-43.0191, rad=1.5) self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # Test diffuse cube model def spat_cube(self): """ Test diffuse cube model """ self.pull('data/sciver/crab_cube') self.test('CRAB_PREFACTOR_PULL') self.test('CRAB_INDEX_PULL') self.test('BACKGROUND_PREFACTOR_PULL') self.test('BACKGROUND_INDEX_PULL') return # ======================== # # Main routine entry point # # ======================== # if __name__ == '__main__': # Allocate test suite container suites = gammalib.GTestSuites('ctools science verification') # Allocate test suite and append it to the container suite_sciver = sciver() # Setup test suit suite_sciver.set() # Append test suite to container suites.append(suite_sciver) # Create pfiles directory try: os.mkdir('pfiles') except: pass # Copy ctools parameter files into pfiles directory os.system('cp -r ../src/*/*.par pfiles/') # Set PFILES environment variable os.environ['PFILES'] = 'pfiles' # Run test suite success = suites.run() # Save test results suites.save('reports/sciver.xml') # Set return code if success: rc = 0 else: rc = 1 # Exit with return code sys.exit(rc)<|fim▁end|>
self.append(self.spat_const, 'Test diffuse isotropic model') self.append(self.spat_map, 'Test diffuse map model') self.append(self.spat_map_roi, 'Test diffuse map model (small ROI)') self.append(self.spat_map_nn, 'Test diffuse map model (not normalized and scaled)')
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod bulk; mod client; mod coll; mod connstring; mod crud_spec; mod db;<|fim▁hole|>mod wire_protocol;<|fim▁end|>
mod cursor; mod error; mod gridfs;
<|file_name|>timezones.py<|end_file_name|><|fim▁begin|>import pytz priorities = ('US/Pacific', 'US/Mountain', 'US/Central', 'US/Eastern', 'Brazil/East', 'UTC') all_tz = pytz.all_timezones_set.copy() for priority in priorities: all_tz.remove(priority)<|fim▁hole|>all_tz = sorted(list(all_tz)) all_tz[:0] = priorities # prepends list to list # tuples for selection widget all_tz = tuple((tz, tz) for tz in all_tz)<|fim▁end|>
<|file_name|>vpn_validator.py<|end_file_name|><|fim▁begin|># Copyright 2014 Cisco Systems, Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json from neutron.db import l3_db from neutron.extensions import vpnaas from neutron import manager from neutron.plugins.common import constants class VpnReferenceValidator(object): """Baseline validation routines for VPN resources.""" IP_MIN_MTU = {4: 68, 6: 1280} @property def l3_plugin(self): try: return self._l3_plugin except AttributeError: self._l3_plugin = manager.NeutronManager.get_service_plugins().get( constants.L3_ROUTER_NAT) return self._l3_plugin @property def core_plugin(self): try: return self._core_plugin except AttributeError: self._core_plugin = manager.NeutronManager.get_plugin() return self._core_plugin def _check_dpd(self, ipsec_sitecon): """Ensure that DPD timeout is greater than DPD interval.""" if ipsec_sitecon['dpd_timeout'] <= ipsec_sitecon['dpd_interval']: raise vpnaas.IPsecSiteConnectionDpdIntervalValueError( attr='dpd_timeout') def _check_mtu(self, context, mtu, ip_version): if mtu < VpnReferenceValidator.IP_MIN_MTU[ip_version]: raise vpnaas.IPsecSiteConnectionMtuError(mtu=mtu, version=ip_version) def assign_sensible_ipsec_sitecon_defaults(self, ipsec_sitecon, prev_conn=None): """Provide defaults for optional items, if missing. Flatten the nested DPD information, and set default values for any missing information. For connection updates, the previous values will be used as defaults for any missing items. """ if not prev_conn: prev_conn = {'dpd_action': 'hold', 'dpd_interval': 30, 'dpd_timeout': 120} dpd = ipsec_sitecon.get('dpd', {}) ipsec_sitecon['dpd_action'] = dpd.get('action', prev_conn['dpd_action']) ipsec_sitecon['dpd_interval'] = dpd.get('interval', prev_conn['dpd_interval']) ipsec_sitecon['dpd_timeout'] = dpd.get('timeout', prev_conn['dpd_timeout']) def validate_ipsec_site_connection(self, context, ipsec_sitecon, ip_version): """Reference implementation of validation for IPSec connection.""" self._check_dpd(ipsec_sitecon) mtu = ipsec_sitecon.get('mtu') if mtu: self._check_mtu(context, mtu, ip_version) def _check_router(self, context, router_id): router = self.l3_plugin.get_router(context, router_id) if not router.get(l3_db.EXTERNAL_GW_INFO): raise vpnaas.RouterIsNotExternal(router_id=router_id) def _check_subnet_id(self, context, router_id, subnet_id): ports = self.core_plugin.get_ports( context, filters={ 'fixed_ips': {'subnet_id': [subnet_id]}, 'device_id': [router_id]}) if not ports: raise vpnaas.SubnetIsNotConnectedToRouter( subnet_id=subnet_id, router_id=router_id) def validate_vpnservice(self, context, vpnservice): self._check_router(context, vpnservice['router_id']) self._check_subnet_id(context, vpnservice['router_id'], vpnservice['subnet_id']) def validate_vpnservice_ngfw(self, context, vpnservice): try: if vpnservice.has_key('description'): description = json.loads(vpnservice["description"]) else: return except ValueError: raise vpnaas.DescriptionInvalid(description=vpnservice["description"]) else: tenant_router_id = description.get("tenant_router_id", None) if not tenant_router_id: raise vpnaas.TenantRouterIdMustBeSet() subnets = description.get("subnets", []) if not subnets: raise vpnaas.SubnetMustBeSet() <|fim▁hole|> self._check_subnet_id(context, tenant_router_id, subnet)<|fim▁end|>
for subnet in subnets: