prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>PianoHeroFileHandler.java<|end_file_name|><|fim▁begin|>package edu.brown.cs.pianoHeroFiles; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Writer; import java.util.ArrayList; import java.util.HashSet; import java.util.Set; public class PianoHeroFileHandler { public void doFileHandling() { try (Writer writer = new BufferedWriter(new OutputStreamWriter( new FileOutputStream("filename.txt"), "utf-8"))) { File dir = new File("pianoHeroFiles/songImages/"); File actualFile = new File(dir, "hey"); File f = new File("C:\\pianoHeroFiles\\songImages\\kiwiCover.png"); System.out.println(f.getName().toString()); File newDir = new File("pianoHeroFiles/songImages/new/"); File newFile = new File(f, "kiwi2.png"); // System.out.println(actualFile); writer.write("something"); writeFile("pianoHeroFiles/test.txt", "something, brah."); Set<File> allMp3s = new HashSet<File>(); File mp3Dir = new File("pianoHeroFiles/songs/"); getAllFilesAndFolder(mp3Dir, allMp3s); for (File fm : allMp3s) { System.out.println("song:"); System.out.println(fm); if (!fm.isDirectory()) { File dest = new File(fm.getParentFile().toString(), "new" + fm.getName()); copyFile(fm, dest); } } } catch (IOException e) { System.err.println("ERROR: error saving the file"); e.printStackTrace(); } } /** * Saves an image to file directory and returns its saved path as a string * * @param image * file * @return path saved */ public static String saveImage(String imageName) { try { File image = new File("Images/" + imageName); // File imageDir = new File("pianoHeroFiles/songImages/"); File 
imageDir = new File("src/main/resources/static/img/"); File saveDir = new File("../img/"); File dest = new File(imageDir, "" + image.getName()); File savedDir = new File(saveDir, "" + image.getName()); copyFile(image, dest); return savedDir.getPath(); } catch (IOException e) { System.err.println("ERROR: error saving image"); } return null; } /**<|fim▁hole|> * @param mp3 * file * @return path saved */ public static String saveMp3(String mp3Name) { try { File mp3 = new File("Songs/" + mp3Name); // File songsDir = new File("pianoHeroFiles/songs/"); File songsDir = new File("src/main/resources/static/songs/"); File saveDir = new File("../songs/"); File dest = new File(songsDir, "" + mp3.getName()); File saveDest = new File(saveDir, "" + mp3.getName()); copyFile(mp3, dest); return saveDest.getPath(); } catch (IOException e) { System.err.println("ERROR: error saving image"); } return null; } /** * Saves the 1d-array boolean of keystrokes for a given song id. * * @param keyStrokes * : 1d-array of booleans * @param songId * : int, the song id * @return String of the path where the keystrokes file was saved. */ public static String saveSongKeystrokes(boolean[] keyStrokes, int songId) { String path = "pianoHeroFiles/songKeyStrokes/"; String keyStrokesID = songId + "_keyStrokes.txt"; String keyStrokesPath = path + keyStrokesID; try (PrintWriter writer = new PrintWriter(keyStrokesPath, "UTF-8")) { String line = ""; // this is for the fake, testing songs. if (keyStrokes == null) { System.out.println("FAKEEEEE"); line += "1000100100010100010101"; } for (int i = 0; i < keyStrokes.length; i++) { String add = keyStrokes[i] ? "1" : "0"; line += add; } writer.println(line); writer.close(); } catch (IOException e) { System.err .println("ERROR: error saving keystrokes for songId: " + songId); } return keyStrokesPath; } /** * Saves a 2d-array boolean of keystrokes for a given song id. 
* * @param keyStrokes * : 2d-array of booleans * @param songId * : int, the song id * @return String of the path where the keystrokes file was saved. */ public static String save2DSongKeystrokes(boolean[][] keyStrokes, int songId) { String path = "pianoHeroFiles/songKeyStrokes/"; String keyStrokesID = songId + "_keyStrokes.txt"; String keyStrokesPath = path + keyStrokesID; try (PrintWriter writer = new PrintWriter(keyStrokesPath, "UTF-8")) { for (int i = 0; i < keyStrokes.length; i++) { String line = ""; for (int j = 0; j < keyStrokes[i].length; j++) { String add = keyStrokes[i][j] ? "1" : "0"; line += add; } writer.println(line); } writer.close(); } catch (IOException e) { System.err .println("ERROR: error saving keystrokes for songId: " + songId); } return keyStrokesPath; } /** * Converts a 1d array of booleans to a 2d array of booleans. * * @param array * : the initial 1d array * @param length * : the length of the partitions. * @return the converted 2d array. */ public static boolean[][] convert1DBooleansTo2D(boolean[] array, int length) { boolean[][] boolean2d = new boolean[length][array.length / length]; for (int i = 0; i < length; i++) { for (int j = 0; j < array.length / length; j++) { boolean2d[i][j] = array[j + i * length]; } } return boolean2d; } /** * Converts a 2d array of booleans to a 1d array of booleans. * * @param array * : the initial 2d array * @return the converted 1d array. 
*/ public static boolean[] convert2DBooleansTo1D(boolean[][] boolean2D) { boolean[] boolean1D = new boolean[boolean2D.length * boolean2D[0].length]; for (int i = 0; i < boolean2D.length; i++) { for (int j = 0; j < boolean2D[i].length; j++) { assert (boolean2D[i].length == boolean2D[0].length); boolean1D[j + i * boolean2D.length] = boolean2D[i][j]; } } return boolean1D; } /** * Returns a file from a given string path * * @param path * string representing the file path * @return the File in the path */ public static File getFileFromPath(String path) { File file = new File(path); return file; } /** * Saves all the files and folders in a set, for a given initial folder. * * @param folder * the initial folder to look all files for. * @param all * the set of files to save on */ public static void getAllFilesAndFolder(File folder, Set<File> all) { all.add(folder); if (folder.isFile()) { return; } for (File file : folder.listFiles()) { if (file.isFile()) { all.add(file); } if (file.isDirectory()) { getAllFilesAndFolder(file, all); } } } /** * Gets the file of the strokes and converts it to a 1d boolean array to * return * * @param fileName * the file name of the keystrokes * @return the 1d array of the strokes */ public static boolean[] getStrokesArray(String fileName) { // This will reference one line at a time String line = null; // FileReader reads text files in the default encoding. try (FileReader fileReader = new FileReader(fileName)) { // It's good to always wrap FileReader in BufferedReader. 
BufferedReader bufferedReader = new BufferedReader(fileReader); int length = 0; ArrayList<Boolean> results = new ArrayList<Boolean>(); while ((line = bufferedReader.readLine()) != null) { if (line != null) { length = line.length(); for (int i = 0; i < line.length(); i++) { if (line.charAt(i) == '0') { results.add(false); } else if (line.charAt(i) == '1') { results.add(true); } } } } boolean[] results1D = new boolean[results.size()]; for (int i = 0; i < results.size(); i++) { results1D[i] = results.get(i); } bufferedReader.close(); return results1D; // convert1DBooleansTo2D(results1D, length); } catch (FileNotFoundException ex) { System.out.println( "Unable to open file '" + fileName + "'"); } catch (IOException ex) { System.out.println( "Error reading file '" + fileName + "'"); } return null; } /** * Copies a file from an initial source path file to a destination * * @param src * - the initial source file * @param dst * - the destination path file * @throws IOException * exception with file handling */ public static void copyFile(File src, File dst) throws IOException { InputStream in = new FileInputStream(src); OutputStream out = new FileOutputStream(dst); try { // Transfer bytes from in to out byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } } catch (IOException e) { System.err.println("ERROR: couldn't copy file in directory"); } finally { in.close(); out.close(); } } /** * Save the given text to the given filename. * * @param canonicalFilename * Like /Users/al/foo/bar.txt * @param text * All the text you want to save to the file as one String. * @throws IOException */ public static void writeFile(String canonicalFilename, String text) throws IOException { File file = new File(canonicalFilename); BufferedWriter out = new BufferedWriter(new FileWriter(file)); out.write(text); out.close(); } /** * Write an array of bytes to a file. Presumably this is binary data; for * plain text use the writeFile method. 
*/ public static void writeFileAsBytes(String fullPath, byte[] bytes) throws IOException { OutputStream bufferedOutputStream = new BufferedOutputStream( new FileOutputStream(fullPath)); InputStream inputStream = new ByteArrayInputStream(bytes); int token = -1; while ((token = inputStream.read()) != -1) { bufferedOutputStream.write(token); } bufferedOutputStream.flush(); bufferedOutputStream.close(); inputStream.close(); } /** * Convert a byte array to a boolean array. Bit 0 is represented with false, * Bit 1 is represented with 1 * * @param bytes * byte[] * @return boolean[] */ public static boolean[] byteArray2BitArray(byte[] bytes) { boolean[] bits = new boolean[bytes.length * 8]; for (int i = 0; i < bytes.length * 8; i++) { if ((bytes[i / 8] & (1 << (7 - (i % 8)))) > 0) { bits[i] = true; } } return bits; } }<|fim▁end|>
* Saves an mp3 file directory and returns its saved path as a string *
<|file_name|>dms.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "dms")] extern crate rusoto_core; extern crate rusoto_dms; use rusoto_dms::{DatabaseMigrationService, DatabaseMigrationServiceClient, DescribeEndpointsMessage}; use rusoto_core::Region; #[test] fn should_describe_tags() { let client = DatabaseMigrationServiceClient::new(Region::UsEast1); let request = DescribeEndpointsMessage::default(); let result = client.describe_endpoints(request).sync().unwrap(); println!("{:#?}", result);<|fim▁hole|><|fim▁end|>
}
<|file_name|>index.test.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved. // See LICENSE.txt for license information. import merge from 'deepmerge'; import {Platform} from 'react-native'; import {Navigation} from 'react-native-navigation'; import configureMockStore from 'redux-mock-store'; import thunk from 'redux-thunk'; import * as NavigationActions from '@actions/navigation'; import {NavigationTypes} from '@constants'; import Preferences from '@mm-redux/constants/preferences'; import EventEmitter from '@mm-redux/utils/event_emitter'; import EphemeralStore from '@store/ephemeral_store'; import intitialState from '@store/initial_state'; import Store from '@store/store'; jest.unmock('@actions/navigation'); const mockStore = configureMockStore([thunk]); const store = mockStore(intitialState); Store.redux = store; // Mock EphemeralStore add/remove modal const add = EphemeralStore.addNavigationModal; const remove = EphemeralStore.removeNavigationModal; EphemeralStore.removeNavigationModal = (componentId) => { remove(componentId); EphemeralStore.removeNavigationComponentId(componentId); }; EphemeralStore.addNavigationModal = (componentId) => { add(componentId); EphemeralStore.addNavigationComponentId(componentId); }; describe('@actions/navigation', () => { const topComponentId = 'top-component-id'; const name = 'name'; const title = 'title'; const theme = Preferences.THEMES.denim; const passProps = { testProp: 'prop', }; const options = { testOption: 'test', }; beforeEach(() => { EphemeralStore.clearNavigationComponents(); EphemeralStore.clearNavigationModals(); // mock that we have a root screen EphemeralStore.addNavigationComponentId(topComponentId); }); // EphemeralStore.getNavigationTopComponentId.mockReturnValue(topComponentId); test('resetToChannel should call Navigation.setRoot', () => { const setRoot = jest.spyOn(Navigation, 'setRoot'); const expectedLayout = { root: { stack: { children: [{ component: { id: 
'Channel', name: 'Channel', passProps, options: { layout: { componentBackgroundColor: theme.centerChannelBg, }, statusBar: { visible: true, }, topBar: { visible: false, height: 0, backButton: { visible: false, enableMenu: false, color: theme.sidebarHeaderTextColor, }, background: { color: theme.sidebarHeaderBg, }, }, }, }, }], }, }, }; NavigationActions.resetToChannel(passProps); expect(setRoot).toHaveBeenCalledWith(expectedLayout); }); test('resetToSelectServer should call Navigation.setRoot', () => { const setRoot = jest.spyOn(Navigation, 'setRoot'); const allowOtherServers = false; const expectedLayout = { root: { stack: { children: [{ component: { id: 'SelectServer', name: 'SelectServer', passProps: { allowOtherServers, }, options: { layout: { backgroundColor: theme.centerChannelBg, componentBackgroundColor: theme.centerChannelBg, }, statusBar: { visible: true, }, topBar: { backButton: { color: theme.sidebarHeaderTextColor, enableMenu: false, title: '', }, background: { color: theme.sidebarHeaderBg, }, visible: false, height: 0, }, }, }, }], }, }, }; NavigationActions.resetToSelectServer(allowOtherServers); expect(setRoot).toHaveBeenCalledWith(expectedLayout); }); test('resetToTeams should call Navigation.setRoot', () => { const setRoot = jest.spyOn(Navigation, 'setRoot'); const defaultOptions = { layout: { componentBackgroundColor: theme.centerChannelBg, }, statusBar: { visible: true, }, topBar: { visible: true, title: { color: theme.sidebarHeaderTextColor, text: title, }, backButton: { color: theme.sidebarHeaderTextColor, enableMenu: false, title: '', }, background: { color: theme.sidebarHeaderBg, }, }, }; const expectedLayout = { root: { stack: { children: [{ component: { id: name, name, passProps, options: merge(defaultOptions, options), }, }], }, }, }; NavigationActions.resetToTeams(name, title, passProps, options); expect(setRoot).toHaveBeenCalledWith(expectedLayout); }); test('goToScreen should call Navigation.push', () => { const push = 
jest.spyOn(Navigation, 'push'); const defaultOptions = { layout: { componentBackgroundColor: theme.centerChannelBg, }, popGesture: true, sideMenu: { left: {enabled: false}, right: {enabled: false}, }, topBar: { animate: true, visible: true, backButton: { color: theme.sidebarHeaderTextColor, enableMenu: false, title: '', testID: 'screen.back.button', }, background: { color: theme.sidebarHeaderBg, }, title: { color: theme.sidebarHeaderTextColor, text: title, }, }, }; const expectedLayout = { component: { id: name, name, passProps, options: merge(defaultOptions, options), }, }; NavigationActions.goToScreen(name, title, passProps, options); expect(push).toHaveBeenCalledWith(topComponentId, expectedLayout); }); test('popTopScreen should call Navigation.pop', () => { const pop = jest.spyOn(Navigation, 'pop'); NavigationActions.popTopScreen(); expect(pop).toHaveBeenCalledWith(topComponentId); const otherComponentId = `other-${topComponentId}`; NavigationActions.popTopScreen(otherComponentId); expect(pop).toHaveBeenCalledWith(otherComponentId); }); test('popToRoot should call Navigation.popToRoot', async () => { const popToRoot = jest.spyOn(Navigation, 'popToRoot'); await NavigationActions.popToRoot(); expect(popToRoot).toHaveBeenCalledWith(topComponentId); }); test('showModal should call Navigation.showModal', () => { const showModal = jest.spyOn(Navigation, 'showModal'); const defaultOptions = { modalPresentationStyle: Platform.select({ios: 'pageSheet', android: 'none'}), layout: { componentBackgroundColor: theme.centerChannelBg, }, statusBar: { visible: true, }, topBar: { animate: true, visible: true, backButton: { color: theme.sidebarHeaderTextColor, enableMenu: false, title: '', }, background: { color: theme.sidebarHeaderBg, }, title: { color: theme.sidebarHeaderTextColor, text: title, }, leftButtonColor: theme.sidebarHeaderTextColor, rightButtonColor: theme.sidebarHeaderTextColor, }, }; const expectedLayout = { stack: { children: [{ component: { id: name, name, 
passProps, options: merge(defaultOptions, options), }, }], }, }; NavigationActions.showModal(name, title, passProps, options); expect(showModal).toHaveBeenCalledWith(expectedLayout); }); test('showModalOverCurrentContext should call Navigation.showModal', () => { const showModal = jest.spyOn(Navigation, 'showModal'); const showModalOverCurrentContextTitle = ''; const showModalOverCurrentContextOptions = { modalPresentationStyle: 'overCurrentContext', layout: { backgroundColor: 'transparent', componentBackgroundColor: 'transparent', }, topBar: { visible: false, height: 0, }, animations: { showModal: { enter: { enabled: false, }, exit: { enabled: false, }, }, dismissModal: { enter: { enabled: false, }, exit: { enabled: false, }, }, }, }; const showModalOptions = { modalPresentationStyle: Platform.select({ios: 'fullScreen', android: 'none'}), layout: { componentBackgroundColor: theme.centerChannelBg,<|fim▁hole|> }, topBar: { animate: true, visible: true, backButton: { color: theme.sidebarHeaderTextColor, enableMenu: false, title: '', }, background: { color: theme.sidebarHeaderBg, }, title: { color: theme.sidebarHeaderTextColor, text: showModalOverCurrentContextTitle, }, leftButtonColor: theme.sidebarHeaderTextColor, rightButtonColor: theme.sidebarHeaderTextColor, }, }; const defaultOptions = merge(showModalOverCurrentContextOptions, options); const expectedLayout = { stack: { children: [{ component: { id: name, name, passProps, options: merge(showModalOptions, defaultOptions), }, }], }, }; NavigationActions.showModalOverCurrentContext(name, passProps, options); expect(showModal).toHaveBeenCalledWith(expectedLayout); }); test('showSearchModal should call Navigation.showModal', () => { const showModal = jest.spyOn(Navigation, 'showModal'); const showSearchModalName = 'Search'; const showSearchModalTitle = ''; const initialValue = 'initial-value'; const showSearchModalPassProps = {initialValue}; const showSearchModalOptions = { topBar: { visible: false, height: 0, }, }; 
const defaultOptions = { modalPresentationStyle: Platform.select({ios: 'pageSheet', android: 'none'}), layout: { componentBackgroundColor: theme.centerChannelBg, }, statusBar: { visible: true, }, topBar: { animate: true, visible: true, backButton: { color: theme.sidebarHeaderTextColor, enableMenu: false, title: '', }, background: { color: theme.sidebarHeaderBg, }, title: { color: theme.sidebarHeaderTextColor, text: showSearchModalTitle, }, leftButtonColor: theme.sidebarHeaderTextColor, rightButtonColor: theme.sidebarHeaderTextColor, }, }; const expectedLayout = { stack: { children: [{ component: { id: showSearchModalName, name: showSearchModalName, passProps: showSearchModalPassProps, options: merge(defaultOptions, showSearchModalOptions), }, }], }, }; NavigationActions.showSearchModal(initialValue); expect(showModal).toHaveBeenCalledWith(expectedLayout); }); test('dismissModal should call Navigation.dismissModal', async () => { const dismissModal = jest.spyOn(Navigation, 'dismissModal'); NavigationActions.showModal('First', 'First Modal', passProps, options); await NavigationActions.dismissModal(options); expect(dismissModal).toHaveBeenCalledWith('First', options); }); test('dismissAllModals should call Navigation.dismissAllModals', async () => { const dismissModal = jest.spyOn(Navigation, 'dismissModal'); NavigationActions.showModal('First', 'First Modal', passProps, options); NavigationActions.showModal('Second', 'Second Modal', passProps, options); await NavigationActions.dismissAllModals(options); expect(dismissModal).toHaveBeenCalledTimes(2); }); test('mergeNavigationOptions should call Navigation.mergeOptions', () => { const mergeOptions = jest.spyOn(Navigation, 'mergeOptions'); NavigationActions.mergeNavigationOptions(topComponentId, options); expect(mergeOptions).toHaveBeenCalledWith(topComponentId, options); }); test('setButtons should call Navigation.mergeOptions', () => { const mergeOptions = jest.spyOn(Navigation, 'mergeOptions'); const buttons = { 
leftButtons: ['left-button'], rightButtons: ['right-button'], }; const setButtonsOptions = { topBar: { ...buttons, }, }; NavigationActions.setButtons(topComponentId, buttons); expect(mergeOptions).toHaveBeenCalledWith(topComponentId, setButtonsOptions); }); test('showOverlay should call Navigation.showOverlay', () => { const showOverlay = jest.spyOn(Navigation, 'showOverlay'); const defaultOptions = { layout: { backgroundColor: 'transparent', componentBackgroundColor: 'transparent', }, overlay: { interceptTouchOutside: false, }, }; const expectedLayout = { component: { name, passProps, options: merge(defaultOptions, options), }, }; NavigationActions.showOverlay(name, passProps, options); expect(showOverlay).toHaveBeenCalledWith(expectedLayout); }); test('dismissOverlay should call Navigation.dismissOverlay', async () => { const dismissOverlay = jest.spyOn(Navigation, 'dismissOverlay'); await NavigationActions.dismissOverlay(topComponentId); expect(dismissOverlay).toHaveBeenCalledWith(topComponentId); }); test('dismissAllModalsAndPopToRoot should call Navigation.dismissAllModals, Navigation.popToRoot, and emit event', async () => { const dismissModal = jest.spyOn(Navigation, 'dismissModal'); const popToRoot = jest.spyOn(Navigation, 'popToRoot'); EventEmitter.emit = jest.fn(); NavigationActions.showModal('First', 'First Modal', passProps, options); NavigationActions.showModal('Second', 'Second Modal', passProps, options); await NavigationActions.dismissAllModalsAndPopToRoot(); expect(dismissModal).toHaveBeenCalledTimes(2); expect(popToRoot).toHaveBeenCalledWith(topComponentId); expect(EventEmitter.emit).toHaveBeenCalledWith(NavigationTypes.NAVIGATION_DISMISS_AND_POP_TO_ROOT); }); });<|fim▁end|>
}, statusBar: { visible: true,
<|file_name|>frame.go<|end_file_name|><|fim▁begin|>package jps type ServerFrame struct { Request IncomingRequest Response OutgoingResponse Session ServerSession ConnectionDetails } type ClientFrame struct { Request OutgoingRequest Response IncomingResponse<|fim▁hole|><|fim▁end|>
Session ClientSession ConnectionDetails }
<|file_name|>list_get_codec.py<|end_file_name|><|fim▁begin|>from hazelcast.serialization.bits import *<|fim▁hole|>from hazelcast.protocol.builtin import StringCodec from hazelcast.protocol.builtin import DataCodec from hazelcast.protocol.builtin import CodecUtil # hex: 0x050F00 _REQUEST_MESSAGE_TYPE = 331520 # hex: 0x050F01 _RESPONSE_MESSAGE_TYPE = 331521 _REQUEST_INDEX_OFFSET = REQUEST_HEADER_SIZE _REQUEST_INITIAL_FRAME_SIZE = _REQUEST_INDEX_OFFSET + INT_SIZE_IN_BYTES def encode_request(name, index): buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE) FixSizedTypesCodec.encode_int(buf, _REQUEST_INDEX_OFFSET, index) StringCodec.encode(buf, name, True) return OutboundMessage(buf, True) def decode_response(msg): msg.next_frame() return CodecUtil.decode_nullable(msg, DataCodec.decode)<|fim▁end|>
from hazelcast.protocol.builtin import FixSizedTypesCodec from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # encoding: utf-8<|fim▁hole|>from .post import * from .system import * def all(): result = [] models = [] for m in models: result += m.__all__ return result __all__ = all()<|fim▁end|>
from .user import * from .upload import *
<|file_name|>test_cobertura.py<|end_file_name|><|fim▁begin|>import mock import lxml.etree as ET from .utils import make_cobertura def test_parse_path(): from pycobertura import Cobertura xml_path = 'foo.xml' with mock.patch('pycobertura.cobertura.os.path.exists', return_value=True): with mock.patch('pycobertura.cobertura.ET.parse') as mock_parse: cobertura = Cobertura(xml_path) assert cobertura.xml is mock_parse.return_value.getroot.return_value def test_version(): cobertura = make_cobertura() assert cobertura.version == '1.9' def test_line_rate(): cobertura = make_cobertura() assert cobertura.line_rate() == 0.9 def test_line_rate_by_class(): cobertura = make_cobertura() expected_line_rates = { 'Main': 1.0, 'search.BinarySearch': 0.9166666666666666, 'search.ISortedArraySearch': 1.0, 'search.LinearSearch': 0.7142857142857143, } for class_name in cobertura.classes(): assert cobertura.line_rate(class_name) == \ expected_line_rates[class_name] def test_branch_rate(): cobertura = make_cobertura() assert cobertura.branch_rate() == 0.75 def test_branch_rate_by_class(): cobertura = make_cobertura() expected_branch_rates = { 'Main': 1.0, 'search.BinarySearch': 0.8333333333333334, 'search.ISortedArraySearch': 1.0, 'search.LinearSearch': 0.6666666666666666, } for class_name in cobertura.classes(): assert cobertura.branch_rate(class_name) == \ expected_branch_rates[class_name] def test_total_misses(): cobertura = make_cobertura() assert cobertura.total_misses() == 3 def test_missed_statements_by_class_name(): cobertura = make_cobertura() expected_missed_statements = { 'Main': [], 'search.BinarySearch': [24], 'search.ISortedArraySearch': [], 'search.LinearSearch': [19, 24], } for class_name in cobertura.classes(): assert cobertura.missed_statements(class_name) == \ expected_missed_statements[class_name] def test_list_packages(): cobertura = make_cobertura() packages = cobertura.packages() assert packages == ['', 'search'] def test_list_classes(): cobertura = make_cobertura() 
classes = cobertura.classes() assert classes == [ 'Main', 'search.BinarySearch', 'search.ISortedArraySearch', 'search.LinearSearch' ] def test_hit_lines__by_iterating_over_classes(): cobertura = make_cobertura() expected_lines = { 'Main': [10, 16, 17, 18, 19, 23, 25, 26, 28, 29, 30], 'search.BinarySearch': [12, 16, 18, 20, 21, 23, 25, 26, 28, 29, 31], 'search.ISortedArraySearch': [], 'search.LinearSearch': [9, 13, 15, 16, 17], } for class_name in cobertura.classes(): assert cobertura.hit_statements(class_name) == expected_lines[class_name] <|fim▁hole|> cobertura = make_cobertura() expected_lines = { 'Main': [], 'search.BinarySearch': [24], 'search.ISortedArraySearch': [], 'search.LinearSearch': [19, 20, 21, 22, 23, 24], } for class_name in cobertura.classes(): assert cobertura.missed_lines(class_name) == expected_lines[class_name] def test_total_statements(): cobertura = make_cobertura() assert cobertura.total_statements() == 30 def test_total_statements_by_class(): cobertura = make_cobertura() expected_total_statements = { 'Main': 11, 'search.BinarySearch': 12, 'search.ISortedArraySearch': 0, 'search.LinearSearch': 7, } for class_name in cobertura.classes(): assert cobertura.total_statements(class_name) == \ expected_total_statements[class_name] def test_total_misses(): cobertura = make_cobertura() assert cobertura.total_misses() == 3 def test_total_misses_by_class(): cobertura = make_cobertura() expected_total_misses = { 'Main': 0, 'search.BinarySearch': 1, 'search.ISortedArraySearch': 0, 'search.LinearSearch': 2, } for class_name in cobertura.classes(): assert cobertura.total_misses(class_name) == \ expected_total_misses[class_name] def test_total_hits(): cobertura = make_cobertura() assert cobertura.total_hits() == 27 def test_total_hits_by_class(): cobertura = make_cobertura() expected_total_misses = { 'Main': 11, 'search.BinarySearch': 11, 'search.ISortedArraySearch': 0, 'search.LinearSearch': 5, } for class_name in cobertura.classes(): assert 
cobertura.total_hits(class_name) == \ expected_total_misses[class_name] def test_filename(): cobertura = make_cobertura() expected_filenames = { 'Main': 'Main.java', 'search.BinarySearch': 'search/BinarySearch.java', 'search.ISortedArraySearch': 'search/ISortedArraySearch.java', 'search.LinearSearch': 'search/LinearSearch.java', } for class_name in cobertura.classes(): assert cobertura.filename(class_name) == \ expected_filenames[class_name] def test_filepath(): base_path = 'foo/bar/baz' cobertura = make_cobertura(base_path=base_path) expected_filepaths = { 'Main': 'foo/bar/baz/Main.java', 'search.BinarySearch': 'foo/bar/baz/search/BinarySearch.java', 'search.ISortedArraySearch': 'foo/bar/baz/search/ISortedArraySearch.java', 'search.LinearSearch': 'foo/bar/baz/search/LinearSearch.java', } for class_name in cobertura.classes(): assert cobertura.filepath(class_name) == \ expected_filepaths[class_name] def test_class_source__sources_not_found(): cobertura = make_cobertura('tests/cobertura.xml') expected_sources = { 'Main': [(0, 'tests/Main.java not found', None)], 'search.BinarySearch': [(0, 'tests/search/BinarySearch.java not found', None)], 'search.ISortedArraySearch': [(0, 'tests/search/ISortedArraySearch.java not found', None)], 'search.LinearSearch': [(0, 'tests/search/LinearSearch.java not found', None)], } for class_name in cobertura.classes(): assert cobertura.class_source(class_name) == expected_sources[class_name] def test_line_statuses(): cobertura = make_cobertura('tests/dummy.source1/coverage.xml') expected_line_statuses = { 'dummy/__init__': [], 'dummy/dummy': [ (1, True), (2, True), (4, True), (5, False), (6, False), ], 'dummy/dummy2': [ (1, True), (2, True), ], 'dummy/dummy4': [ (1, False), (2, False), (4, False), (5, False), (6, False) ], } for class_name in cobertura.classes(): assert cobertura.line_statuses(class_name) == \ expected_line_statuses[class_name] def test_class_source__sources_found(): cobertura = 
make_cobertura('tests/dummy.source1/coverage.xml') expected_sources = { 'dummy/__init__': [], 'dummy/dummy': [ (1, 'def foo():\n', True), (2, ' pass\n', True), (3, '\n', None), (4, 'def bar():\n', True), (5, " a = 'a'\n", False), (6, " b = 'b'\n", False), ], 'dummy/dummy2': [ (1, 'def baz():\n', True), (2, ' pass\n', True) ], 'dummy/dummy4': [ (1, 'def barbaz():\n', False), (2, ' pass\n', False), (3, '\n', None), (4, 'def foobarbaz():\n', False), (5, ' a = 1 + 3\n', False), (6, ' pass\n', False) ], } for class_name in cobertura.classes(): assert cobertura.class_source(class_name) == \ expected_sources[class_name]<|fim▁end|>
def test_missed_lines():
<|file_name|>sv.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license */ <|fim▁hole|><|fim▁end|>
CKEDITOR.plugins.setLang( 'removeformat', 'sv', { toolbar: 'Radera formatering' } );
<|file_name|>DragAndDrop.java<|end_file_name|><|fim▁begin|>package gui.dragndrop; import javafx.application.Application; import javafx.scene.Scene; import javafx.scene.control.*; import javafx.scene.input.*; import javafx.scene.layout.*; import javafx.scene.paint.Color; import javafx.stage.Stage; public class DragAndDrop extends Application { public void start(Stage stage) { AnchorPane root = new AnchorPane(); Label source = new Label("DRAG ME"); source.setLayoutX(50); source.setLayoutY(100); source.setScaleX(2.0); source.setScaleY(2.0); root.getChildren().add(source); Label target = new Label("DROP HERE"); target.setLayoutX(250); target.setLayoutY(100); target.setScaleX(2.0); target.setScaleY(2.0); root.getChildren().add(target); source.setOnDragDetected(e->onDragDetected(e)); target.setOnDragOver(e->onDragOver(e)); target.setOnDragEntered(e->onDragEntered(e)); target.setOnDragExited(e->onDragExited(e)); target.setOnDragDropped(e->onDragDropped(e)); source.setOnDragDone(e->onDragDone(e)); Scene scene = new Scene(root, 400, 200); stage.setScene(scene); stage.setTitle("Drag and Drop"); stage.show(); } private void onDragDetected(MouseEvent e) { System.out.println("onDragDetected"); Label source = (Label)e.getSource(); Dragboard db = source.startDragAndDrop(TransferMode.ANY); ClipboardContent content = new ClipboardContent(); content.putString(source.getText()); db.setContent(content); } private void onDragOver(DragEvent e) { System.out.println("onDragOver"); Label target = (Label)e.getSource(); if(e.getGestureSource() != target && e.getDragboard().hasString()) { e.acceptTransferModes(TransferMode.COPY_OR_MOVE); } }<|fim▁hole|> private void onDragEntered(DragEvent e) { System.out.println("onDragEntered"); Label target = (Label)e.getSource(); if(e.getGestureSource() != target && e.getDragboard().hasString()) { target.setTextFill(Color.RED); } } private void onDragExited(DragEvent e) { System.out.println("onDragExited"); Label target = (Label)e.getSource(); 
target.setTextFill(Color.BLACK); } private void onDragDropped(DragEvent e) { System.out.println("onDragDropped"); Label target = (Label)e.getSource(); Dragboard db = e.getDragboard(); boolean success = false; if(db.hasString()) { target.setText(db.getString()); success = true; } e.setDropCompleted(success); } private void onDragDone(DragEvent e) { System.out.println("onDragDone"); Label source = (Label)e.getSource(); if (e.getTransferMode() == TransferMode.MOVE) { source.setText(""); } } public static void main(String[] args) { launch(args); } }<|fim▁end|>
<|file_name|>versions.py<|end_file_name|><|fim▁begin|># Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import pytest from spack.main import SpackCommand versions = SpackCommand('versions') def test_safe_only_versions(): """Only test the safe versions of a package. (Using the deprecated command line argument) """ versions('--safe-only', 'zlib') def test_safe_versions(): """Only test the safe versions of a package.""" versions('--safe', 'zlib') @pytest.mark.network def test_remote_versions(): """Test a package for which remote versions should be available.""" versions('zlib') <|fim▁hole|> versions('--remote', 'zlib') @pytest.mark.network @pytest.mark.usefixtures('mock_packages') def test_new_versions_only(): """Test a package for which new versions should be available.""" versions('--new', 'brillig') @pytest.mark.network def test_no_versions(): """Test a package for which no remote versions are available.""" versions('converge') @pytest.mark.network def test_no_unchecksummed_versions(): """Test a package for which no unchecksummed versions are available.""" versions('bzip2') @pytest.mark.network def test_versions_no_url(): """Test a package with versions but without a ``url`` attribute.""" versions('graphviz') @pytest.mark.network def test_no_versions_no_url(): """Test a package without versions or a ``url`` attribute.""" versions('opengl')<|fim▁end|>
@pytest.mark.network def test_remote_versions_only(): """Test a package for which remote versions should be available."""
<|file_name|>testing.py<|end_file_name|><|fim▁begin|>from __future__ import annotations import re import sys from typing import Any, Callable, TypeVar from pathlib import Path import numpy as np import numpy.typing as npt AR_f8: npt.NDArray[np.float64] AR_i8: npt.NDArray[np.int64] bool_obj: bool suppress_obj: np.testing.suppress_warnings FT = TypeVar("FT", bound=Callable[..., Any]) def func() -> int: ... def func2( x: npt.NDArray[np.number[Any]], y: npt.NDArray[np.number[Any]], ) -> npt.NDArray[np.bool_]: ... reveal_type(np.testing.KnownFailureException()) # E: KnownFailureException reveal_type(np.testing.IgnoreException()) # E: IgnoreException reveal_type(np.testing.clear_and_catch_warnings(modules=[np.testing])) # E: _clear_and_catch_warnings_without_records reveal_type(np.testing.clear_and_catch_warnings(True)) # E: _clear_and_catch_warnings_with_records reveal_type(np.testing.clear_and_catch_warnings(False)) # E: _clear_and_catch_warnings_without_records reveal_type(np.testing.clear_and_catch_warnings(bool_obj)) # E: clear_and_catch_warnings reveal_type(np.testing.clear_and_catch_warnings.class_modules) # E: tuple[types.ModuleType] reveal_type(np.testing.clear_and_catch_warnings.modules) # E: set[types.ModuleType] with np.testing.clear_and_catch_warnings(True) as c1: reveal_type(c1) # E: builtins.list[warnings.WarningMessage] with np.testing.clear_and_catch_warnings() as c2: reveal_type(c2) # E: None reveal_type(np.testing.suppress_warnings("once")) # E: suppress_warnings reveal_type(np.testing.suppress_warnings()(func)) # E: def () -> builtins.int reveal_type(suppress_obj.filter(RuntimeWarning)) # E: None reveal_type(suppress_obj.record(RuntimeWarning)) # E: list[warnings.WarningMessage] with suppress_obj as c3: reveal_type(c3) # E: suppress_warnings reveal_type(np.testing.verbose) # E: int reveal_type(np.testing.IS_PYPY) # E: bool reveal_type(np.testing.HAS_REFCOUNT) # E: bool reveal_type(np.testing.HAS_LAPACK64) # E: bool reveal_type(np.testing.assert_(1, 
msg="test")) # E: None reveal_type(np.testing.assert_(2, msg=lambda: "test")) # E: None if sys.platform == "win32" or sys.platform == "cygwin": reveal_type(np.testing.memusage()) # E: builtins.int elif sys.platform == "linux": reveal_type(np.testing.memusage()) # E: Union[None, builtins.int] else: reveal_type(np.testing.memusage()) # E: <nothing> reveal_type(np.testing.jiffies()) # E: builtins.int reveal_type(np.testing.build_err_msg([0, 1, 2], "test")) # E: str reveal_type(np.testing.build_err_msg(range(2), "test", header="header")) # E: str reveal_type(np.testing.build_err_msg(np.arange(9).reshape(3, 3), "test", verbose=False)) # E: str reveal_type(np.testing.build_err_msg("abc", "test", names=["x", "y"])) # E: str reveal_type(np.testing.build_err_msg([1.0, 2.0], "test", precision=5)) # E: str reveal_type(np.testing.assert_equal({1}, {1})) # E: None reveal_type(np.testing.assert_equal([1, 2, 3], [1, 2, 3], err_msg="fail")) # E: None reveal_type(np.testing.assert_equal(1, 1.0, verbose=True)) # E: None reveal_type(np.testing.print_assert_equal('Test XYZ of func xyz', [0, 1], [0, 1])) # E: None reveal_type(np.testing.assert_almost_equal(1.0, 1.1)) # E: None reveal_type(np.testing.assert_almost_equal([1, 2, 3], [1, 2, 3], err_msg="fail")) # E: None reveal_type(np.testing.assert_almost_equal(1, 1.0, verbose=True)) # E: None reveal_type(np.testing.assert_almost_equal(1, 1.0001, decimal=2)) # E: None reveal_type(np.testing.assert_approx_equal(1.0, 1.1)) # E: None reveal_type(np.testing.assert_approx_equal("1", "2", err_msg="fail")) # E: None reveal_type(np.testing.assert_approx_equal(1, 1.0, verbose=True)) # E: None reveal_type(np.testing.assert_approx_equal(1, 1.0001, significant=2)) # E: None reveal_type(np.testing.assert_array_compare(func2, AR_i8, AR_f8, err_msg="test")) # E: None reveal_type(np.testing.assert_array_compare(func2, AR_i8, AR_f8, verbose=True)) # E: None reveal_type(np.testing.assert_array_compare(func2, AR_i8, AR_f8, header="header")) # E: None 
reveal_type(np.testing.assert_array_compare(func2, AR_i8, AR_f8, precision=np.int64())) # E: None reveal_type(np.testing.assert_array_compare(func2, AR_i8, AR_f8, equal_nan=False)) # E: None reveal_type(np.testing.assert_array_compare(func2, AR_i8, AR_f8, equal_inf=True)) # E: None reveal_type(np.testing.assert_array_equal(AR_i8, AR_f8)) # E: None reveal_type(np.testing.assert_array_equal(AR_i8, AR_f8, err_msg="test")) # E: None reveal_type(np.testing.assert_array_equal(AR_i8, AR_f8, verbose=True)) # E: None reveal_type(np.testing.assert_array_almost_equal(AR_i8, AR_f8)) # E: None reveal_type(np.testing.assert_array_almost_equal(AR_i8, AR_f8, err_msg="test")) # E: None reveal_type(np.testing.assert_array_almost_equal(AR_i8, AR_f8, verbose=True)) # E: None reveal_type(np.testing.assert_array_almost_equal(AR_i8, AR_f8, decimal=1)) # E: None reveal_type(np.testing.assert_array_less(AR_i8, AR_f8)) # E: None reveal_type(np.testing.assert_array_less(AR_i8, AR_f8, err_msg="test")) # E: None reveal_type(np.testing.assert_array_less(AR_i8, AR_f8, verbose=True)) # E: None reveal_type(np.testing.runstring("1 + 1", {})) # E: Any reveal_type(np.testing.runstring("int64() + 1", {"int64": np.int64})) # E: Any reveal_type(np.testing.assert_string_equal("1", "1")) # E: None reveal_type(np.testing.rundocs()) # E: None reveal_type(np.testing.rundocs("test.py")) # E: None reveal_type(np.testing.rundocs(Path("test.py"), raise_on_error=True)) # E: None @np.testing.raises(RuntimeError, RuntimeWarning) def func3(a: int) -> bool: ... 
reveal_type(func3) # E: def (a: builtins.int) -> builtins.bool reveal_type(np.testing.assert_raises(RuntimeWarning)) # E: _AssertRaisesContext[builtins.RuntimeWarning] reveal_type(np.testing.assert_raises(RuntimeWarning, func3, 5)) # E: None reveal_type(np.testing.assert_raises_regex(RuntimeWarning, r"test")) # E: _AssertRaisesContext[builtins.RuntimeWarning] reveal_type(np.testing.assert_raises_regex(RuntimeWarning, b"test", func3, 5)) # E: None reveal_type(np.testing.assert_raises_regex(RuntimeWarning, re.compile(b"test"), func3, 5)) # E: None class Test: ... def decorate(a: FT) -> FT: return a reveal_type(np.testing.decorate_methods(Test, decorate)) # E: None reveal_type(np.testing.decorate_methods(Test, decorate, None)) # E: None reveal_type(np.testing.decorate_methods(Test, decorate, "test")) # E: None reveal_type(np.testing.decorate_methods(Test, decorate, b"test")) # E: None reveal_type(np.testing.decorate_methods(Test, decorate, re.compile("test"))) # E: None reveal_type(np.testing.measure("for i in range(1000): np.sqrt(i**2)")) # E: float reveal_type(np.testing.measure(b"for i in range(1000): np.sqrt(i**2)", times=5)) # E: float reveal_type(np.testing.assert_allclose(AR_i8, AR_f8)) # E: None reveal_type(np.testing.assert_allclose(AR_i8, AR_f8, rtol=0.005)) # E: None reveal_type(np.testing.assert_allclose(AR_i8, AR_f8, atol=1)) # E: None reveal_type(np.testing.assert_allclose(AR_i8, AR_f8, equal_nan=True)) # E: None reveal_type(np.testing.assert_allclose(AR_i8, AR_f8, err_msg="err")) # E: None reveal_type(np.testing.assert_allclose(AR_i8, AR_f8, verbose=False)) # E: None<|fim▁hole|>reveal_type(np.testing.assert_array_max_ulp(AR_i8, AR_f8, dtype=np.float32)) # E: numpy.ndarray[Any, numpy.dtype[Any]] reveal_type(np.testing.assert_warns(RuntimeWarning)) # E: _GeneratorContextManager[None] reveal_type(np.testing.assert_warns(RuntimeWarning, func3, 5)) # E: bool reveal_type(np.testing.assert_no_warnings()) # E: _GeneratorContextManager[None] 
reveal_type(np.testing.assert_no_warnings(func3, 5)) # E: bool reveal_type(np.testing.tempdir("test_dir")) # E: _GeneratorContextManager[builtins.str] reveal_type(np.testing.tempdir(prefix=b"test")) # E: _GeneratorContextManager[builtins.bytes] reveal_type(np.testing.tempdir("test_dir", dir=Path("here"))) # E: _GeneratorContextManager[builtins.str] reveal_type(np.testing.temppath("test_dir", text=True)) # E: _GeneratorContextManager[builtins.str] reveal_type(np.testing.temppath(prefix=b"test")) # E: _GeneratorContextManager[builtins.bytes] reveal_type(np.testing.temppath("test_dir", dir=Path("here"))) # E: _GeneratorContextManager[builtins.str] reveal_type(np.testing.assert_no_gc_cycles()) # E: _GeneratorContextManager[None] reveal_type(np.testing.assert_no_gc_cycles(func3, 5)) # E: None reveal_type(np.testing.break_cycles()) # E: None reveal_type(np.testing.TestCase()) # E: unittest.case.TestCase reveal_type(np.testing.run_module_suite(file_to_run="numpy/tests/test_matlib.py")) # E: None<|fim▁end|>
reveal_type(np.testing.assert_array_almost_equal_nulp(AR_i8, AR_f8, nulp=2)) # E: None reveal_type(np.testing.assert_array_max_ulp(AR_i8, AR_f8, maxulp=2)) # E: numpy.ndarray[Any, numpy.dtype[Any]]
<|file_name|>address.py<|end_file_name|><|fim▁begin|>"""Contains the :code:`Address` class, representing a collection of reverse geocoding results. Primarily, this functions as a container for a set of :code:`errorgeopy.Location` objects after a successful reverse geocode, and exposes methods that operate on this set of results, including: - de-duplication - extracting the results that best match a pre-expected outcome - finding the longest common substring of candidate addresses .. moduleauthor Richard Law <[email protected]> """ # import usaddress from fuzzywuzzy import process as fuzzyprocess from errorgeopy.utils import (long_substr, check_location_type, check_addresses_exist) from functools import wraps class Address(object): """Represents a collection of parsed reverse geocoder responses (parsed with geopy). Each member of the :code:`address` property (which is iterable) is a :code:`geopy.address` object. The raw respones can therefore be obtained with: >>> [a.raw for a in Address.addresses] :code:`errorgeopy` adds methods that operate on the collection of addresses that consider the set of addresses as a related set. Attributes: :code:`addresses` (:code:`list`): Collection of reverse geocoding responses from as many services that were capable of returning a response to a query. Each member of the array is a :code:`geopy.location.Location` object. """ @check_location_type def __init__(self, addresses): self._addresses = addresses or None def __unicode__(self): return '\n'.join([str(a) for a in self.addresses]) def __str__(self): return self.__unicode__() @property def addresses(self): """A list of reverse geocoding results from all configured providers. The single central property of the Address object. Notes: Depending on configuration, a provider may return more than one result for a given query. All results from all providers are available in this property, in a *flat* (not nested) structure. The list may be empty if no provider could match an address. 
""" return self._addresses if self._addresses else [] @check_addresses_exist def dedupe(self, threshold=95): """dedupe(threshold=95) Produces a fuzzily de-duplicated version of the candidate addresses, using :code:`fuzzywuzzy.proccess.dedupe`. Note: See https://github.com/seatgeek/fuzzywuzzy/blob/master/fuzzywuzzy/process.py for detail on the deduplication algorithm implementation. This method does not modify the :code:`Address.addresses`. property. Kwargs: threshold (int): the numerical value (0,100) point at which you expect to find duplicates. Defaults to 95 out of 100, which is higher than the fuzzywuzzy default (70); this higher threshold is used by defauly since addresses are more sensitive to small changes (e.g. "250 Main Street" and "150 Main Street" have a small edit distance when considered as strings, but may have a reasonably large physical distance when considered as physical addresses). Returns: A list of :code:`geopy.location.Location` objects (essentially a filtered list of the original set). """ return fuzzyprocess.dedupe([str(a) for a in self.addresses], threshold) @check_addresses_exist def longest_common_substring(self, dedupe=False): """longest_common_substring(dedupe=False) Returns the longest common substring of the reverse geocoded addresses. Note that if there is no common substring, a string of length zero is returned. If the longest common substring is whitespace, that is stripped, and a string of length zero is returned. Kwargs: dedupe (bool): whether to first perform a deduplication operation on the set of addresses. Defaults to False. Returns: str """ addresses = self.addresses if not dedupe else self.dedupe() return long_substr([str(a) for a in addresses]) @check_addresses_exist def longest_common_sequence(self, separator=' '): """longest_common_sequence(separator='') Returns the longest common sequence of the reverse geocoded addresses... or it would, if I had written this code. 
Raises: NotImplementedError """ # return utils.longest_common_sequence([str(a) for a in self.addresses], # separator) raise NotImplementedError @check_addresses_exist def regex(self): """regex() Returns a regular expression that matches all of the reverse geocoded addresses... well it would if I had written this code. Raises: NotImplementedError """ raise NotImplementedError @check_addresses_exist def extract(self, expectation, limit=4): """extract(extraction, limit=4) Returns the address or addresses within the set of the reverse geocoded addresses that best match an expected result. Uses fuzzywuzzy under the hood for matching. Args: expectation (str): The string indicating your expected result for a reverse geocoding operation. It should probably look like an address. Results are returned in the order that best meets this expected address. Kwargs:<|fim▁hole|> for the expected address (i.e. if two geocoders resolve to the same string address). Returns: list. Return value is a list of tuples, where each tuple contains a geopy Location, and a matching score based on an extension of the Levenshtien distance between the expectation and the Location's address (a higher score is a better match). 
The algorithm is implemented by SeatGeek's fuzzywuzzy, and you can read more here: http://chairnerd.seatgeek.com/fuzzywuzzy-fuzzy-string-matching-in-python/ """ extractions = fuzzyprocess.extractBests( expectation, [str(a) for a in self.addresses], limit=limit) result = [] for extraction in extractions: result.extend([(x, extraction[1]) for x in self.addresses if str(x) == extraction[0]]) return result @check_addresses_exist def parse(self): """parse() Raises: NotImplementedError """ # return [usaddress.parse(str(a)) for a in self.addresses] raise NotImplementedError @check_addresses_exist def tag(self, summarise=True): """tag(summarise=True) Raises: NotImplementedError """ # tagged_addresses = [usaddress.tag(str(a)) for a in self.addresses] # if not summarise: # return tags # summarised_tags = OrderedDict() # for address in tagged_addresses[0]: # for k, v in address.items(): # if k not in summarised_tags: # summarised_tags[k] = set([v]) # else: # summarised_tags[k] = summarised_tags[k].add(v) # return summarised_tags, set([a[1] for a in tagged_addresses]) raise NotImplementedError<|fim▁end|>
limit (int): The maximum number of match candidates to retrieve from fuzzywuzzy. The length of the returned array may be longer, if the set of addresses has identical addresses that are good matches
<|file_name|>get_type_id.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::any::Any; use core::any::TypeId; // #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] // #[stable(feature = "rust1", since = "1.0.0")] // pub struct TypeId { // t: u64, // }<|fim▁hole|> // } type T = i32; #[test] fn get_type_id_test1() { let x: T = 68; let y: T = 500; let x_typeid: TypeId = x.get_type_id(); let y_typeid: TypeId = y.get_type_id(); assert_eq!(x_typeid, y_typeid); } #[test] fn get_type_id_test2() { struct A; let x: A = A; let _: TypeId = x.get_type_id(); } }<|fim▁end|>
// impl<T: Reflect + 'static> Any for T { // fn get_type_id(&self) -> TypeId { TypeId::of::<T>() }
<|file_name|>dispatcher.py<|end_file_name|><|fim▁begin|># This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # # See LICENSE for more details. # # Copyright: Red Hat Inc. 2015 # Author: Cleber Rosa <[email protected]> """Extensions/plugins dispatchers.""" import copy import logging import sys from stevedore import EnabledExtensionManager from .settings import settings from ..utils import stacktrace class Dispatcher(EnabledExtensionManager): """ Base dispatcher for various extension types """ #: Default namespace prefix for Avocado extensions NAMESPACE_PREFIX = 'avocado.plugins.' def __init__(self, namespace, invoke_kwds={}): self.load_failures = [] super(Dispatcher, self).__init__(namespace=namespace, check_func=self.enabled, invoke_on_load=True, invoke_kwds=invoke_kwds, on_load_failure_callback=self.store_load_failure, propagate_map_exceptions=True) def plugin_type(self): """ Subset of entry points namespace for this dispatcher Given an entry point `avocado.plugins.foo`, plugin type is `foo`. If entry point does not conform to the Avocado standard prefix, it's returned unchanged. 
""" if self.namespace.startswith(self.NAMESPACE_PREFIX): return self.namespace[len(self.NAMESPACE_PREFIX):] else: return self.namespace def fully_qualified_name(self, extension): """ Returns the Avocado fully qualified plugin name :param extension: an Stevedore Extension instance :type extension: :class:`stevedore.extension.Extension` """ return "%s.%s" % (self.plugin_type(), extension.entry_point.name) def settings_section(self): """ Returns the config section name for the plugin type handled by itself """ return "plugins.%s" % self.plugin_type() def enabled(self, extension): disabled = settings.get_value('plugins', 'disable', key_type=list) return self.fully_qualified_name(extension) not in disabled def names(self): """ Returns the names of the discovered extensions This differs from :func:`stevedore.extension.ExtensionManager.names` in that it returns names in a predictable order, by using standard :func:`sorted`. """ return sorted(super(Dispatcher, self).names()) def _init_plugins(self, extensions): super(Dispatcher, self)._init_plugins(extensions) self.extensions.sort(key=lambda x: x.name) configured_order = settings.get_value(self.settings_section(), "order", key_type=list, default=[]) ordered = [] for name in configured_order: for ext in self.extensions: if name == ext.name: ordered.append(ext)<|fim▁hole|> self.extensions = ordered @staticmethod def store_load_failure(manager, entrypoint, exception): manager.load_failures.append((entrypoint, exception)) class CLIDispatcher(Dispatcher): """ Calls extensions on configure/run Automatically adds all the extension with entry points registered under 'avocado.plugins.cli' """ def __init__(self): super(CLIDispatcher, self).__init__('avocado.plugins.cli') class CLICmdDispatcher(Dispatcher): """ Calls extensions on configure/run Automatically adds all the extension with entry points registered under 'avocado.plugins.cli.cmd' """ def __init__(self): super(CLICmdDispatcher, self).__init__('avocado.plugins.cli.cmd') 
class JobPrePostDispatcher(Dispatcher): """ Calls extensions before Job execution Automatically adds all the extension with entry points registered under 'avocado.plugins.job.prepost' """ def __init__(self): super(JobPrePostDispatcher, self).__init__('avocado.plugins.job.prepost') def map_method(self, method_name, job): for ext in self.extensions: try: if hasattr(ext.obj, method_name): method = getattr(ext.obj, method_name) method(job) except SystemExit: raise except KeyboardInterrupt: raise except: job.log.error('Error running method "%s" of plugin "%s": %s', method_name, ext.name, sys.exc_info()[1]) class ResultDispatcher(Dispatcher): def __init__(self): super(ResultDispatcher, self).__init__('avocado.plugins.result') def map_method(self, method_name, result, job): for ext in self.extensions: try: if hasattr(ext.obj, method_name): method = getattr(ext.obj, method_name) method(result, job) except SystemExit: raise except KeyboardInterrupt: raise except: job.log.error('Error running method "%s" of plugin "%s": %s', method_name, ext.name, sys.exc_info()[1]) class ResultEventsDispatcher(Dispatcher): def __init__(self, args): super(ResultEventsDispatcher, self).__init__( 'avocado.plugins.result_events', invoke_kwds={'args': args}) self.log = logging.getLogger("avocado.app") def map_method(self, method_name, *args): for ext in self.extensions: try: if hasattr(ext.obj, method_name): method = getattr(ext.obj, method_name) method(*args) except SystemExit: raise except KeyboardInterrupt: raise except: self.log.error('Error running method "%s" of plugin "%s": %s', method_name, ext.name, sys.exc_info()[1]) class VarianterDispatcher(Dispatcher): def __init__(self): super(VarianterDispatcher, self).__init__('avocado.plugins.varianter') def __getstate__(self): """ Very fragile pickle which works when all Varianter plugins are available on both machines. 
TODO: Replace this with per-plugin-refresh-mechanism """ return {"extensions": getattr(self, "extensions")} def __setstate__(self, state): """ Very fragile pickle which works when all Varianter plugins are available on both machines. TODO: Replace this with per-plugin-refresh-mechanism """ self.__init__() self.extensions = state.get("extensions") def _map_method(self, method_name, deepcopy=False, *args, **kwargs): """ :warning: **kwargs are not supported for deepcopy=True """ ret = [] for ext in self.extensions: try: if hasattr(ext.obj, method_name): method = getattr(ext.obj, method_name) if deepcopy: copied_args = [copy.deepcopy(arg) for arg in args] ret.append(method(*copied_args)) else: ret.append(method(*args, **kwargs)) except SystemExit: raise except KeyboardInterrupt: raise except: # catch any exception pylint: disable=W0702 stacktrace.log_exc_info(sys.exc_info(), logger='avocado.debug') log = logging.getLogger("avocado.app") log.error('Error running method "%s" of plugin "%s": %s', method_name, ext.name, sys.exc_info()[1]) return ret def map_method(self, method_name, *args, **kwargs): return self._map_method(method_name, False, *args, **kwargs) def map_method_copy(self, method_name, *args): """ The same as map_method, but use copy.deepcopy on each passed arg """ return self._map_method(method_name, True, *args)<|fim▁end|>
for ext in self.extensions: if ext not in ordered: ordered.append(ext)
<|file_name|>EarthShape.java<|end_file_name|><|fim▁begin|>// EarthShape.java // See copyright.txt for license and terms of use. package earthshape; import java.awt.BorderLayout; import java.awt.Cursor; import java.awt.Image; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.TreeSet; import javax.swing.JCheckBoxMenuItem; import javax.swing.JLabel; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPopupMenu; import javax.swing.KeyStroke; import javax.swing.SwingUtilities; import com.jogamp.opengl.GLCapabilities; import util.FloatUtil; import util.Vector3d; import util.Vector3f; import util.swing.ModalDialog; import util.swing.MyJFrame; import util.swing.MySwingWorker; import util.swing.ProgressDialog; import static util.swing.SwingUtil.log; /** This application demonstrates a procedure for inferring the shape * of a surface (such as the Earth) based on the observed locations of * stars from various locations at a fixed point in time. * * This class, EarthShape, manages UI components external to the 3D * display, such as the menu and status bars. It also contains all of * the code to construct the virtual 3D map using various algorithms. * The 3D display, along with its camera controls, is in EarthMapCanvas. */ public class EarthShape extends MyJFrame { // --------- Constants ---------- /** AWT boilerplate generated serial ID. */ private static final long serialVersionUID = 3903955302894393226L; /** Size of the first constructed square, in kilometers. The * displayed size is then determined by the map scale, which * is normally 1 unit per 1000 km. 
*/ private static final float INITIAL_SQUARE_SIZE_KM = 1000; /** Initial value of 'adjustOrientationDegrees', and the value to * which it is reset when a new square is created. */ private static final float DEFAULT_ADJUST_ORIENTATION_DEGREES = 1.0f; /** Do not let 'adjustOrientationDegrees' go below this value. Below * this value is pointless because the precision of the variance is * not high enough to discriminate among the choices. */ private static final float MINIMUM_ADJUST_ORIENTATION_DEGREES = 1e-7f; // ---------- Instance variables ---------- // ---- Observation Information ---- /** The observations that will drive surface reconstruction. * By default, this will be data from the real world, but it * can be swapped out at the user's option. */ public WorldObservations worldObservations = new RealWorldObservations(); /** Set of stars that are enabled. */ private LinkedHashMap<String, Boolean> enabledStars = new LinkedHashMap<String, Boolean>(); // ---- Interactive surface construction state ---- /** The square we will build upon when the next square is added. * This may be null. */ private SurfaceSquare activeSquare; /** When adjusting the orientation of the active square, this * is how many degrees to rotate at once. */ private float adjustOrientationDegrees = DEFAULT_ADJUST_ORIENTATION_DEGREES; // ---- Options ---- /** When true, star observations are only compared by their * direction. When false, we also consider the location of the * observer, which allows us to handle nearby objects. */ public boolean assumeInfiniteStarDistance = false; /** When true, star observations are only compared by their * direction, and furthermore, only the elevation, ignoring * azimuth. This is potentially interesting because, in * practice, it is difficult to accurately measure azimuth * with just a hand-held sextant. */ public boolean onlyCompareElevations = false; /** When analyzing the solution space, use this many points of * rotation on each side of 0, for each axis. 
Note that the * algorithm is cubic in this parameter. */ private int solutionAnalysisPointsPerSide = 20; /** If the Sun's elevation is higher than this value, then * we cannot see any stars. */ private float maximumSunElevation = -5; /** When true, take the Sun's elevation into account. */ private boolean useSunElevation = true; /** When true, use the "new" orientation algorithm that * repeatedly applies the recommended command. Otherwise, * use the older one based on average deviation. The old * algorithm is faster, but slightly less accurate, and * does not mimic the process a user would use to manually * adjust a square's orientation. */ private boolean newAutomaticOrientationAlgorithm = true; // ---- Widgets ---- /** Canvas showing the Earth surface built so far. */ private EarthMapCanvas emCanvas; /** Widget to show various state variables such as camera position. */ private JLabel statusLabel; /** Selected square info panel on right side. */ private InfoPanel infoPanel; // Menu items to toggle various options. 
// Checkbox menu items, retained as fields so their checkmarks can be
// kept in sync with the option variables they control.
private JCheckBoxMenuItem drawCoordinateAxesCBItem;
private JCheckBoxMenuItem drawCrosshairCBItem;
private JCheckBoxMenuItem drawWireframeSquaresCBItem;
private JCheckBoxMenuItem drawCompassesCBItem;
private JCheckBoxMenuItem drawSurfaceNormalsCBItem;
private JCheckBoxMenuItem drawCelestialNorthCBItem;
private JCheckBoxMenuItem drawStarRaysCBItem;
private JCheckBoxMenuItem drawUnitStarRaysCBItem;
private JCheckBoxMenuItem drawBaseSquareStarRaysCBItem;
private JCheckBoxMenuItem drawTravelPathCBItem;
private JCheckBoxMenuItem drawActiveSquareAtOriginCBItem;
private JCheckBoxMenuItem useSunElevationCBItem;
private JCheckBoxMenuItem invertHorizontalCameraMovementCBItem;
private JCheckBoxMenuItem invertVerticalCameraMovementCBItem;
private JCheckBoxMenuItem newAutomaticOrientationAlgorithmCBItem;
private JCheckBoxMenuItem assumeInfiniteStarDistanceCBItem;
private JCheckBoxMenuItem onlyCompareElevationsCBItem;
private JCheckBoxMenuItem drawWorldWireframeCBItem;
private JCheckBoxMenuItem drawWorldStarsCBItem;
private JCheckBoxMenuItem drawSkyboxCBItem;

// ---------- Methods ----------

/** Construct the main application window: GL canvas in the center,
 * menu bar on top, status bar on the bottom, info panel on the right. */
public EarthShape()
{
    super("EarthShape");
    this.setName("EarthShape (JFrame)");
    this.setLayout(new BorderLayout());
    this.setIcon();

    for (String starName : this.worldObservations.getAllStars()) {
        // Initially all stars are enabled.
        this.enabledStars.put(starName, true);
    }

    this.setSize(1150, 800);
    this.setLocationByPlatform(true);

    this.setupJOGL();

    this.buildMenuBar();

    // Status bar on bottom.
    this.statusLabel = new JLabel();
    this.statusLabel.setName("statusLabel");
    this.add(this.statusLabel, BorderLayout.SOUTH);

    // Info panel on right.
    this.add(this.infoPanel = new InfoPanel(), BorderLayout.EAST);

    this.updateUIState();
}

/** Initialize the JOGL library, then create a GL canvas and
 * associate it with this window. */
private void setupJOGL()
{
    log("creating GLCapabilities");

    // This call takes about one second to complete, which is
    // pretty slow...
    GLCapabilities caps = new GLCapabilities(null /*profile*/);
    log("caps: "+caps);
    caps.setDoubleBuffered(true);
    caps.setHardwareAccelerated(true);

    // Build the object that will show the surface.
    this.emCanvas = new EarthMapCanvas(this, caps);

    // Associate the canvas with 'this' window.
    this.add(this.emCanvas, BorderLayout.CENTER);
}

/** Set the window icon to my application's icon. */
private void setIcon()
{
    try {
        URL url = this.getClass().getResource("globe-icon.png");
        Image img = Toolkit.getDefaultToolkit().createImage(url);
        this.setIconImage(img);
    }
    catch (Exception e) {
        // Non-fatal: the window simply keeps the default icon.
        System.err.println("Failed to set app icon: "+e.getMessage());
    }
}

/** Show the "About" dialog box. */
private void showAboutBox()
{
    String about =
        "EarthShape\n"+
        "Copyright 2017 Scott McPeak\n"+
        "\n"+
        "Published under the 2-clause BSD license.\n"+
        "See copyright.txt for details.\n";
    JOptionPane.showMessageDialog(this, about, "About", JOptionPane.INFORMATION_MESSAGE);
}

/** Show a dialog box listing all of the key bindings. */
private void showKeyBindings()
{
    String bindings =
        "Left click in 3D view to enter FPS controls mode.\n"+
        "Esc - Leave FPS mode.\n"+
        "W/A/S/D - Move camera horizontally when in FPS mode.\n"+
        "Space/Z - Move camera up or down in FPS mode.\n"+
        "Left click on square in FPS mode to make it active.\n"+
        "\n"+
        "T - Reconstruct Earth from star data.\n"+
        "\n"+
        "C - Toggle compass or Earth texture.\n"+
        "P - Toggle star rays for active square.\n"+
        "G - Move camera to active square's center.\n"+
        "H - Build complete Earth using assumed sphere.\n"+
        "R - Build Earth using assumed sphere and random walk.\n"+
        "\n"+
        "U/O - Roll active square left or right.\n"+
        "I/K - Pitch active square down or up.\n"+
        "J/L - Yaw active square left or right.\n"+
        "1 - Reset adjustment amount to 1 degree.\n"+
        "-/= - Decrease or increase adjustment amount.\n"+
        "; (semicolon) - Make recommended active square adjustment.\n"+
        "/ (slash) - Automatically orient active square.\n"+
        "\' (apostrophe) - Analyze rotation solution space for active square.\n"+
        "\n"+
        "N - Start a new surface.\n"+
        "M - Add a square adjacent to the active square.\n"+
        "Ctrl+N/S/E/W - Add a square to the N/S/E/W and automatically adjust it.\n"+
        ", (comma) - Move to previous active square.\n"+
        ". (period) - Move to next active square.\n"+
        "Delete - Delete active square.\n"+
        "\n"+
        "0/PgUp/PgDn - Change animation state (not for general use)\n"+
        "";
    JOptionPane.showMessageDialog(this, bindings, "Bindings", JOptionPane.INFORMATION_MESSAGE);
}

/** Build the menu bar and attach it to 'this'. */
private void buildMenuBar()
{
    // This ensures that the menu items do not appear underneath
    // the GL canvas.  Strangely, this problem appeared suddenly,
    // after I made a seemingly irrelevant change (putting a
    // scroll bar on the info panel).  But this call solves it,
    // so whatever.
    JPopupMenu.setDefaultLightWeightPopupEnabled(false);

    JMenuBar menuBar = new JMenuBar();
    this.setJMenuBar(menuBar);

    // Used keys:
    //   a - Move camera left
    //   b
    //   c - Toggle compass
    //   d - Move camera right
    //   e
    //   f
    //   g - Go to selected square's center
    //   h - Build spherical Earth
    //   i - Pitch active square down
    //   j - Yaw active square left
    //   k - Pitch active square up
    //   l - Yaw active square right
    //   m - Add adjacent square to surface
    //   n - Build new surface
    //   o - Roll active square right
    //   p - Toggle star rays for active square
    //   q
    //   r - Build with random walk
    //   s - Move camera backward
    //   t - Build full Earth with star data
    //   u - Roll active square left
    //   v
    //   w - Move camera forward
    //   x
    //   y
    //   z - Move camera down
    //   0 - Reset animation state to 0
    //   1 - Reset adjustment amount to 1
    //   - - Decrease adjustment amount
    //   = - Increase adjustment amount
    //   ; - One recommended rotation adjustment
    //   ' - Analyze solution space
    //   , - Select previous square
    //   . - Select next square
    //   / - Automatically orient active square
    //   Space - Move camera up
    //   Delete - Delete active square
    //   Enter - enter FPS mode
    //   Esc - leave FPS mode
    //   Ins - canned commands
    //   PgUp - Decrement animation state
    //   PgDn - Increment animation state
    //   Ctrl+E - build and orient to the East
    //   Ctrl+W - build and orient to the West
    //   Ctrl+N - build and orient to the North
    //   Ctrl+S - build and orient to the South

    menuBar.add(this.buildFileMenu());
    menuBar.add(this.buildDrawMenu());
    menuBar.add(this.buildBuildMenu());
    menuBar.add(this.buildSelectMenu());
    menuBar.add(this.buildEditMenu());
    menuBar.add(this.buildNavigateMenu());
    menuBar.add(this.buildAnimateMenu());
    menuBar.add(this.buildOptionsMenu());
    menuBar.add(this.buildHelpMenu());
}

/** Build the "File" menu: choose the observation data source, exit. */
private JMenu buildFileMenu()
{
    JMenu menu = new JMenu("File");
    addMenuItem(menu, "Use real world astronomical observations", null,
        new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                EarthShape.this.changeObservations(new RealWorldObservations());
            }
        });
    menu.addSeparator();
    addMenuItem(menu, "Use model: spherical Earth with nearby stars", null,
        new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                EarthShape.this.changeObservations(new CloseStarObservations());
            }
        });
    addMenuItem(menu, "Use model: azimuthal equidistant projection flat Earth", null,
        new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                EarthShape.this.changeObservations(new AzimuthalEquidistantObservations());
            }
        });
    addMenuItem(menu, "Use model: bowl-shaped Earth", null,
        new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                EarthShape.this.changeObservations(new BowlObservations());
            }
        });
    addMenuItem(menu, "Use model: saddle-shaped Earth", null,
        new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                EarthShape.this.changeObservations(new SaddleObservations());
            }
        });
    menu.addSeparator();
    addMenuItem(menu, "Exit", null,
        new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                EarthShape.this.dispose();
            }
        });
    return menu;
}

/** Build the "Draw" menu: toggles for the various rendering options.
 * The created checkbox items are stored in fields so their check
 * state can be refreshed by updateUIState(). */
private JMenu buildDrawMenu()
{
    JMenu drawMenu = new JMenu("Draw");
    this.drawCoordinateAxesCBItem =
        addCBMenuItem(drawMenu, "Draw X/Y/Z coordinate axes", null,
            this.emCanvas.drawAxes,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawAxes();
                }
            });
    this.drawCrosshairCBItem =
        addCBMenuItem(drawMenu, "Draw crosshair when in FPS camera mode", null,
            this.emCanvas.drawCrosshair,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawCrosshair();
                }
            });
    this.drawWireframeSquaresCBItem =
        addCBMenuItem(drawMenu, "Draw squares as wireframes with translucent squares", null,
            this.emCanvas.drawWireframeSquares,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawWireframeSquares();
                }
            });
    this.drawCompassesCBItem =
        addCBMenuItem(drawMenu, "Draw squares with compass texture (vs. world map)",
            KeyStroke.getKeyStroke('c'),
            this.emCanvas.drawCompasses,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawCompasses();
                }
            });
    this.drawSurfaceNormalsCBItem =
        addCBMenuItem(drawMenu, "Draw surface normals", null,
            this.emCanvas.drawSurfaceNormals,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawSurfaceNormals();
                }
            });
    this.drawCelestialNorthCBItem =
        addCBMenuItem(drawMenu, "Draw celestial North", null,
            this.emCanvas.drawCelestialNorth,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawCelestialNorth();
                }
            });
    this.drawStarRaysCBItem =
        addCBMenuItem(drawMenu, "Draw star rays for active square",
            KeyStroke.getKeyStroke('p'),
            this.activeSquareDrawsStarRays(),
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawStarRays();
                }
            });
    this.drawUnitStarRaysCBItem =
        addCBMenuItem(drawMenu, "Draw star rays as unit vectors", null,
            this.emCanvas.drawUnitStarRays,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawUnitStarRays();
                }
            });
    this.drawBaseSquareStarRaysCBItem =
        addCBMenuItem(drawMenu, "Draw star rays for the base square too, on the active square", null,
            this.emCanvas.drawBaseSquareStarRays,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawBaseSquareStarRays();
                }
            });
    this.drawTravelPathCBItem =
        addCBMenuItem(drawMenu, "Draw the travel path from base square to active square", null,
            this.emCanvas.drawTravelPath,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawTravelPath();
                }
            });
    this.drawActiveSquareAtOriginCBItem =
        addCBMenuItem(drawMenu, "Draw the active square at the 3D coordinate origin", null,
            this.emCanvas.drawActiveSquareAtOrigin,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawActiveSquareAtOrigin();
                }
            });
    this.drawWorldWireframeCBItem =
        addCBMenuItem(drawMenu, "Draw world wireframe (if one is in use)", null,
            this.emCanvas.drawWorldWireframe,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawWorldWireframe();
                }
            });
    this.drawWorldStarsCBItem =
        addCBMenuItem(drawMenu, "Draw world stars (if in use)", null,
            this.emCanvas.drawWorldStars,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawWorldStars();
                }
            });
    this.drawSkyboxCBItem =
        addCBMenuItem(drawMenu, "Draw skybox", null,
            this.emCanvas.drawSkybox,
            new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    EarthShape.this.toggleDrawSkybox();
                }
            });
    addMenuItem(drawMenu, "Set skybox distance...", null,
        new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                EarthShape.this.setSkyboxDistance();
            }
        });
    addMenuItem(drawMenu, "Turn off all star rays", null,
        new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                EarthShape.this.turnOffAllStarRays();
            }
        });
    return drawMenu;
}
private JMenu buildBuildMenu() { JMenu buildMenu = new JMenu("Build"); addMenuItem(buildMenu, "Build Earth using active observational data or model", KeyStroke.getKeyStroke('t'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.buildEarthSurfaceFromStarData(); } }); addMenuItem(buildMenu, "Build complete Earth using assumed sphere", KeyStroke.getKeyStroke('h'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.buildSphericalEarthSurfaceWithLatLong(); } }); addMenuItem(buildMenu, "Build partial Earth using assumed sphere and random walk", KeyStroke.getKeyStroke('r'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.buildSphericalEarthWithRandomWalk(); } }); addMenuItem(buildMenu, "Start a new surface using star data", KeyStroke.getKeyStroke('n'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.startNewSurface(); } }); addMenuItem(buildMenu, "Add another square to the surface", KeyStroke.getKeyStroke('m'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.buildNextSquare(); } }); buildMenu.addSeparator(); addMenuItem(buildMenu, "Create new square 9 degrees to the East and orient it automatically", KeyStroke.getKeyStroke(KeyEvent.VK_E, InputEvent.CTRL_DOWN_MASK), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.createAndAutomaticallyOrientActiveSquare( 0 /*deltLatitude*/, +9 /*deltaLongitude*/); } }); addMenuItem(buildMenu, "Create new square 9 degrees to the West and orient it automatically", KeyStroke.getKeyStroke(KeyEvent.VK_W, InputEvent.CTRL_DOWN_MASK), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.createAndAutomaticallyOrientActiveSquare( 0 /*deltLatitude*/, -9 /*deltaLongitude*/); } }); addMenuItem(buildMenu, "Create new square 9 degrees to the North and orient it automatically", KeyStroke.getKeyStroke(KeyEvent.VK_N, 
InputEvent.CTRL_DOWN_MASK), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.createAndAutomaticallyOrientActiveSquare( +9 /*deltLatitude*/, 0 /*deltaLongitude*/); } }); addMenuItem(buildMenu, "Create new square 9 degrees to the South and orient it automatically", KeyStroke.getKeyStroke(KeyEvent.VK_S, InputEvent.CTRL_DOWN_MASK), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.createAndAutomaticallyOrientActiveSquare( -9 /*deltLatitude*/, 0 /*deltaLongitude*/); } }); buildMenu.addSeparator(); addMenuItem(buildMenu, "Do some canned setup for debugging", KeyStroke.getKeyStroke(KeyEvent.VK_INSERT, 0), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.doCannedSetup(); } }); return buildMenu; } private JMenu buildSelectMenu() { JMenu selectMenu = new JMenu("Select"); addMenuItem(selectMenu, "Select previous square", KeyStroke.getKeyStroke(','), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.selectNextSquare(false /*forward*/); } }); addMenuItem(selectMenu, "Select next square", KeyStroke.getKeyStroke('.'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.selectNextSquare(true /*forward*/); } }); return selectMenu; } private JMenu buildEditMenu() { JMenu editMenu = new JMenu("Edit"); for (RotationCommand rc : RotationCommand.values()) { this.addAdjustOrientationMenuItem(editMenu, rc.description, rc.key, rc.axis); } editMenu.addSeparator(); addMenuItem(editMenu, "Double active square adjustment angle", KeyStroke.getKeyStroke('='), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.changeAdjustOrientationDegrees(2.0f); } }); addMenuItem(editMenu, "Halve active square adjustment angle", KeyStroke.getKeyStroke('-'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.changeAdjustOrientationDegrees(0.5f); } }); addMenuItem(editMenu, "Reset 
active square adjustment angle to 1 degree", KeyStroke.getKeyStroke('1'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.adjustOrientationDegrees = 1; EarthShape.this.updateUIState(); } }); editMenu.addSeparator(); addMenuItem(editMenu, "Analyze solution space", KeyStroke.getKeyStroke('\''), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.analyzeSolutionSpace(); } }); addMenuItem(editMenu, "Set solution analysis resolution...", null, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.setSolutionAnalysisResolution(); } }); editMenu.addSeparator(); addMenuItem(editMenu, "Do one recommended rotation adjustment", KeyStroke.getKeyStroke(';'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.applyRecommendedRotationCommand(); } }); addMenuItem(editMenu, "Automatically orient active square", KeyStroke.getKeyStroke('/'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.automaticallyOrientActiveSquare(); } }); addMenuItem(editMenu, "Delete active square", KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.deleteActiveSquare(); } }); return editMenu; } private JMenu buildNavigateMenu() { JMenu menu = new JMenu("Navigate"); addMenuItem(menu, "Control camera like a first-person shooter", KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.emCanvas.enterFPSMode(); } }); addMenuItem(menu, "Leave first-person shooter mode", KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.emCanvas.exitFPSMode(); } }); addMenuItem(menu, "Go to active square's center", KeyStroke.getKeyStroke('g'), new ActionListener() { public void actionPerformed(ActionEvent e) { 
EarthShape.this.goToActiveSquareCenter(); } }); addMenuItem(menu, "Go to origin", null, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.moveCamera(new Vector3f(0,0,0)); } }); menu.addSeparator(); addMenuItem(menu, "Curvature calculator...", null, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.showCurvatureDialog(); } }); return menu; } private JMenu buildAnimateMenu() { JMenu menu = new JMenu("Animate"); addMenuItem(menu, "Reset to animation state 0", KeyStroke.getKeyStroke('0'), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.setAnimationState(0); } }); addMenuItem(menu, "Increment animation state", KeyStroke.getKeyStroke(KeyEvent.VK_PAGE_DOWN, 0), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.setAnimationState(+1); } }); addMenuItem(menu, "Decrement animation state", KeyStroke.getKeyStroke(KeyEvent.VK_PAGE_UP, 0), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.setAnimationState(-1); } }); return menu; } private JMenu buildOptionsMenu() { JMenu menu = new JMenu("Options"); addMenuItem(menu, "Choose enabled stars...", null, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.chooseEnabledStars(); } }); addMenuItem(menu, "Set maximum Sun elevation...", null, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.setMaximumSunElevation(); } }); this.useSunElevationCBItem = addCBMenuItem(menu, "Take Sun elevation into account", null, this.useSunElevation, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.useSunElevation = !EarthShape.this.useSunElevation; EarthShape.this.updateUIState(); } }); menu.addSeparator(); this.invertHorizontalCameraMovementCBItem = addCBMenuItem(menu, "Invert horizontal camera movement", null, this.emCanvas.invertHorizontalCameraMovement, new ActionListener() { public 
void actionPerformed(ActionEvent e) { EarthShape.this.emCanvas.invertHorizontalCameraMovement = !EarthShape.this.emCanvas.invertHorizontalCameraMovement; EarthShape.this.updateUIState(); } }); this.invertVerticalCameraMovementCBItem = addCBMenuItem(menu, "Invert vertical camera movement", null, this.emCanvas.invertVerticalCameraMovement, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.emCanvas.invertVerticalCameraMovement = !EarthShape.this.emCanvas.invertVerticalCameraMovement; EarthShape.this.updateUIState(); } }); menu.addSeparator(); this.newAutomaticOrientationAlgorithmCBItem = addCBMenuItem(menu, "Use new automatic orientation algorithm", null, this.newAutomaticOrientationAlgorithm, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.newAutomaticOrientationAlgorithm = !EarthShape.this.newAutomaticOrientationAlgorithm; EarthShape.this.updateUIState(); } }); this.assumeInfiniteStarDistanceCBItem = addCBMenuItem(menu, "Assume stars are infinitely far away", null, this.assumeInfiniteStarDistance, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.assumeInfiniteStarDistance = !EarthShape.this.assumeInfiniteStarDistance; EarthShape.this.updateAndRedraw(); } }); this.onlyCompareElevationsCBItem = addCBMenuItem(menu, "Only compare star elevations", null, this.onlyCompareElevations, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.onlyCompareElevations = !EarthShape.this.onlyCompareElevations; EarthShape.this.updateAndRedraw(); } }); return menu; } private JMenu buildHelpMenu() { JMenu helpMenu = new JMenu("Help"); addMenuItem(helpMenu, "Show all key bindings...", null, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.showKeyBindings(); } }); addMenuItem(helpMenu, "About...", null, new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.showAboutBox(); } }); return 
helpMenu; } /** Add a menu item to adjust the orientation of the active square. */ private void addAdjustOrientationMenuItem( JMenu menu, String description, char key, Vector3f axis) { addMenuItem(menu, description, KeyStroke.getKeyStroke(key), new ActionListener() { public void actionPerformed(ActionEvent e) { EarthShape.this.adjustActiveSquareOrientation(axis); } }); } /** Make a new menu item and add it to 'menu' with the given * label and listener. */ private static void addMenuItem(JMenu menu, String itemLabel, KeyStroke accelerator, ActionListener listener) { JMenuItem item = new JMenuItem(itemLabel); item.addActionListener(listener); if (accelerator != null) { item.setAccelerator(accelerator); } menu.add(item); } private static JCheckBoxMenuItem addCBMenuItem(JMenu menu, String itemLabel, KeyStroke accelerator, boolean initState, ActionListener listener) { JCheckBoxMenuItem cbItem = new JCheckBoxMenuItem(itemLabel, initState); cbItem.addActionListener(listener); if (accelerator != null) { cbItem.setAccelerator(accelerator); } menu.add(cbItem); return cbItem; } /** Choose the set of stars to use. This only affects new * squares. */ private void chooseEnabledStars() { StarListDialog d = new StarListDialog(this, this.enabledStars); if (d.exec()) { this.enabledStars = d.stars; this.updateAndRedraw(); } } /** Clear out the virtual map and any dependent state. */ private void clearSurfaceSquares() { this.emCanvas.clearSurfaceSquares(); this.activeSquare = null; } /** Build a portion of the Earth's surface. Adds squares to * 'surfaceSquares'. This works by iterating over latitude * and longitude pairs and assuming a spherical Earth. It * assumes the Earth is a sphere. */ public void buildSphericalEarthSurfaceWithLatLong() { log("building spherical Earth"); this.clearSurfaceSquares(); // Size of squares to build, in km. float sizeKm = 1000; // Start with an arbitrary square centered at the origin // the 3D space, and at SF, CA in the real world. 
float startLatitude = 38; // 38N float startLongitude = -122; // 122W SurfaceSquare startSquare = new SurfaceSquare( new Vector3f(0,0,0), // center new Vector3f(0,0,-1), // north new Vector3f(0,1,0), // up sizeKm, startLatitude, startLongitude, null /*base*/, null /*midpoint*/, new Vector3f(0,0,0)); this.emCanvas.addSurfaceSquare(startSquare); // Outer loop 1: Walk North as far as we can. SurfaceSquare outer = startSquare; for (float latitude = startLatitude; latitude < 90; latitude += 9) { // Go North another step. outer = this.addSphericallyAdjacentSquare(outer, latitude, startLongitude); // Inner loop: Walk East until we get back to // the same longitude. float longitude = startLongitude; float prevLongitude = longitude; SurfaceSquare inner = outer; while (true) { inner = this.addSphericallyAdjacentSquare(inner, latitude, longitude); if (prevLongitude < outer.longitude && outer.longitude <= longitude) { break; } prevLongitude = longitude; longitude = FloatUtil.modulus2f(longitude+9, -180, 180); } } // Outer loop 2: Walk South as far as we can. outer = startSquare; for (float latitude = startLatitude - 9; latitude > -90; latitude -= 9) { // Go North another step. outer = this.addSphericallyAdjacentSquare(outer, latitude, startLongitude); // Inner loop: Walk East until we get back to // the same longitude. float longitude = startLongitude; float prevLongitude = longitude; SurfaceSquare inner = outer; while (true) { inner = this.addSphericallyAdjacentSquare(inner, latitude, longitude); if (prevLongitude < outer.longitude && outer.longitude <= longitude) { break; } prevLongitude = longitude; longitude = FloatUtil.modulus2f(longitude+9, -180, 180); } } this.emCanvas.redrawCanvas(); log("finished building Earth; nsquares="+this.emCanvas.numSurfaceSquares()); } /** Build the surface by walking randomly from a starting location, * assuming a Earth is a sphere. 
     */
    public void buildSphericalEarthWithRandomWalk()
    {
        log("building spherical Earth by random walk");
        this.clearSurfaceSquares();

        // Size of squares to build, in km.
        float sizeKm = 1000;

        // Start with an arbitrary square centered at the origin of
        // the 3D space, and at SF, CA in the real world.
        float startLatitude = 38;     // 38N
        float startLongitude = -122;  // 122W
        SurfaceSquare startSquare = new SurfaceSquare(
            new Vector3f(0,0,0),      // center
            new Vector3f(0,0,-1),     // north
            new Vector3f(0,1,0),      // up
            sizeKm,
            startLatitude,
            startLongitude,
            null /*base*/, null /*midpoint*/,
            new Vector3f(0,0,0));
        this.emCanvas.addSurfaceSquare(startSquare);

        SurfaceSquare square = startSquare;
        for (int i=0; i < 1000; i++) {
            // Select a random change in latitude and longitude
            // of about 10 degrees.
            float deltaLatitude = (float)(Math.random() * 12 - 6);
            float deltaLongitude = (float)(Math.random() * 12 - 6);

            // Walk in that direction, keeping latitude and longitude
            // within their usual ranges.  Also stay away from the poles
            // since the rounding errors cause problems there.
            square = this.addSphericallyAdjacentSquare(square,
                FloatUtil.clampf(square.latitude + deltaLatitude, -80, 80),
                FloatUtil.modulus2f(square.longitude + deltaLongitude, -180, 180));
        }

        this.emCanvas.redrawCanvas();
        log("finished building Earth; nsquares="+this.emCanvas.numSurfaceSquares());
    }

    /** Given square 'old', add an adjacent square at the given
      * latitude and longitude.  The relative orientation of the
      * new square will be determined using the latitude and longitude,
      * assuming a spherical shape for the Earth.
      *
      * This is used by the routines that build the surface using
      * the sphere assumption, not those that use star observation
      * data. */
    private SurfaceSquare addSphericallyAdjacentSquare(
        SurfaceSquare old,
        float newLatitude,
        float newLongitude)
    {
        // Calculate local East for 'old'.
        Vector3f oldEast = old.north.cross(old.up).normalize();

        // Calculate celestial North for 'old', which is given by
        // the latitude plus geographic North.
        Vector3f celestialNorth =
            old.north.rotateDeg(old.latitude, oldEast);

        // Get lat/long deltas.
        float deltaLatitude = newLatitude - old.latitude;
        float deltaLongitude = FloatUtil.modulus2f(
            newLongitude - old.longitude, -180, 180);

        // If we didn't move, just return the old square.
        if (deltaLongitude == 0 && deltaLatitude == 0) {
            return old;
        }

        // What we want now is to first rotate Northward
        // around local East to account for change in latitude, then
        // Eastward around celestial North for change in longitude.
        // (The rotation order here matters; do not reorder.)
        Vector3f firstRotation = oldEast.times(-deltaLatitude);
        Vector3f secondRotation = celestialNorth.times(deltaLongitude);

        // But then we want to express the composition of those as a
        // single rotation vector in order to call the general routine.
        Vector3f combined = Vector3f.composeRotations(firstRotation, secondRotation);

        // Now call into the general procedure for adding a square
        // given the proper relative orientation rotation.
        return addRotatedAdjacentSquare(old, newLatitude, newLongitude, combined);
    }

    /** Build a surface using star data rather than any presumed
      * size and shape. */
    public void buildEarthSurfaceFromStarData()
    {
        // Show a wait cursor while the (potentially long) computation
        // runs inside the progress dialog's worker task.
        Cursor oldCursor = this.getCursor();
        this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
        try {
            ProgressDialog<Void, Void> pd =
                new ProgressDialog<Void, Void>(this,
                    "Building Surface with model: "+this.worldObservations.getDescription(),
                    new BuildSurfaceTask(EarthShape.this));
            pd.exec();
        }
        finally {
            // Always restore the cursor, even if the task throws.
            this.setCursor(oldCursor);
        }
        this.emCanvas.redrawCanvas();
    }

    /** Task to manage construction of surface.
      *
      * I have to use a class rather than a simple closure so I have
      * something to pass to the build routines so they can set the
      * status and progress as the algorithm runs.
     */
    private static class BuildSurfaceTask extends MySwingWorker<Void, Void> {
        // Frame whose surface-building routine this task runs.
        private EarthShape earthShape;

        public BuildSurfaceTask(EarthShape earthShape_)
        {
            this.earthShape = earthShape_;
        }

        /** Runs on the worker thread; delegates back to the frame,
          * passing 'this' so the algorithm can report status/progress. */
        protected Void doTask() throws Exception
        {
            this.earthShape.buildEarthSurfaceFromStarDataInner(this);
            return null;
        }
    }

    /** Core of 'buildEarthSurfaceFromStarData', so I can more easily
      * wrap computation around it. */
    public void buildEarthSurfaceFromStarDataInner(BuildSurfaceTask task)
    {
        log("building Earth using star data: "+this.worldObservations.getDescription());
        this.clearSurfaceSquares();

        // Size of squares to build, in km.
        float sizeKm = 1000;

        // Start at approximately my location in SF, CA.  This is one of
        // the locations for which I have manual data, and when we build
        // the first latitude strip, that will pick up the other manual
        // data points.
        float latitude = 38;
        float longitude = -122;
        SurfaceSquare square = null;
        log("buildEarth: building first square at lat="+latitude+" long="+longitude);

        // First square will be placed at the 3D origin with
        // its North pointed along the -Z axis.
        square = new SurfaceSquare(
            new Vector3f(0,0,0),      // center
            new Vector3f(0,0,-1),     // north
            new Vector3f(0,1,0),      // up
            sizeKm,
            latitude,
            longitude,
            null /*base*/, null /*midpoint*/,
            new Vector3f(0,0,0));
        this.emCanvas.addSurfaceSquare(square);
        this.addMatchingData(square);

        // Go East and West.
        task.setStatus("Initial latitude strip at "+square.latitude);
        this.buildLatitudeStrip(square, +9);
        this.buildLatitudeStrip(square, -9);

        // Explore in all directions until all points on
        // the surface have been explored (to within 9 degrees).
        this.buildLongitudeStrip(square, +9, task);
        this.buildLongitudeStrip(square, -9, task);

        // Reset the adjustment angle.
        this.adjustOrientationDegrees = EarthShape.DEFAULT_ADJUST_ORIENTATION_DEGREES;

        log("buildEarth: finished using star data; nSquares="+this.emCanvas.numSurfaceSquares());
    }

    /** Build squares by going North or South from 'startSquare' until we
      * reach a pole, cannot place a square, or the task is cancelled.
      * At each spot, also build latitude strips in both directions. */
    private void buildLongitudeStrip(SurfaceSquare startSquare,
        float deltaLatitude, BuildSurfaceTask task)
    {
        float curLatitude = startSquare.latitude;
        float curLongitude = startSquare.longitude;
        SurfaceSquare curSquare = startSquare;

        if (task.isCancelled()) {
            // Bail now, rather than repeat the cancellation log message.
            return;
        }

        while (!task.isCancelled()) {
            float newLatitude = curLatitude + deltaLatitude;
            if (!( -90 < newLatitude && newLatitude < 90 )) {
                // Do not go past the poles.
                break;
            }
            float newLongitude = curLongitude;
            log("buildEarth: building lat="+newLatitude+" long="+newLongitude);

            // Report progress.
            task.setStatus("Latitude strip at "+newLatitude);
            {
                // +1 since we did one strip before the first call to
                // buildLongitudeStrip.
                float totalStrips = 180 / (float)Math.abs(deltaLatitude) + 1;
                float completedStrips;
                if (deltaLatitude > 0) {
                    // Northward pass: count strips from the start latitude.
                    completedStrips = (newLatitude - startSquare.latitude) / deltaLatitude + 1;
                }
                else {
                    // Southward pass runs second, so count all strips from
                    // the North pole down to 'newLatitude'.
                    completedStrips = (90 - newLatitude) / -deltaLatitude + 1;
                }
                float fraction = completedStrips / totalStrips;
                log("progress fraction: "+fraction);
                task.setProgressFraction(fraction);
            }

            curSquare = this.createAndAutomaticallyOrientSquare(curSquare,
                newLatitude, newLongitude);
            if (curSquare == null) {
                log("buildEarth: could not place next square!");
                break;
            }

            curLatitude = newLatitude;
            curLongitude = newLongitude;

            // Also build strips in each direction.
            this.buildLatitudeStrip(curSquare, +9);
            this.buildLatitudeStrip(curSquare, -9);
        }

        if (task.isCancelled()) {
            log("surface construction canceled");
        }
    }

    /** Build squares by going East or West from 'startSquare'
      * until we add 20 or we can't add any more. */
    private void buildLatitudeStrip(SurfaceSquare startSquare,
        float deltaLongitude)
    {
        float curLatitude = startSquare.latitude;
        float curLongitude = startSquare.longitude;
        SurfaceSquare curSquare = startSquare;

        for (int i=0; i < 20; i++) {
            float newLatitude = curLatitude;
            float newLongitude = FloatUtil.modulus2f(curLongitude + deltaLongitude, -180, 180);
            log("buildEarth: building lat="+newLatitude+" long="+newLongitude);

            curSquare = this.createAndAutomaticallyOrientSquare(curSquare,
                newLatitude, newLongitude);
            if (curSquare == null) {
                log("buildEarth: could not place next square!");
                break;
            }

            curLatitude = newLatitude;
            curLongitude = newLongitude;
        }
    }

    /** Create a square adjacent to 'old', positioned at the given latitude
      * and longitude, with orientation changed by 'rotation'.  If there is
      * no change, return null.  Even if not, do not add the square yet,
      * just return it. */
    private SurfaceSquare createRotatedAdjacentSquare(
        SurfaceSquare old,
        float newLatitude,
        float newLongitude,
        Vector3f rotation)
    {
        // Normalize latitude and longitude.
        newLatitude = FloatUtil.clampf(newLatitude, -90, 90);
        newLongitude = FloatUtil.modulus2f(newLongitude, -180, 180);

        // If we didn't move, return null.
        if (old.latitude == newLatitude && old.longitude == newLongitude) {
            return null;
        }

        // Compute the new orientation vectors by rotating
        // the old ones by the given amount.
        Vector3f newNorth = old.north.rotateAADeg(rotation);
        Vector3f newUp = old.up.rotateAADeg(rotation);

        // Get observed travel details going to the new location.
        TravelObservation tobs = this.worldObservations.getTravelObservation(
            old.latitude, old.longitude, newLatitude, newLongitude);

        // For both old and new, calculate a unit vector for the
        // travel direction.  Both headings are negated due to the
        // right hand rule for rotation.  The new to old heading is
        // then flipped 180 since I want both to indicate the local
        // direction from old to new.
        Vector3f oldTravel = old.north.rotateDeg(-tobs.startToEndHeading, old.up);
        Vector3f newTravel = newNorth.rotateDeg(-tobs.endToStartHeading + 180, newUp);

        // Calculate the new square's center by going half the distance
        // according to the old orientation and then half the distance
        // according to the new orientation, in world coordinates.
        float halfDistWorld = tobs.distanceKm / 2.0f * EarthMapCanvas.SPACE_UNITS_PER_KM;
        Vector3f midPoint = old.center.plus(oldTravel.times(halfDistWorld));
        Vector3f newCenter = midPoint.plus(newTravel.times(halfDistWorld));

        // Make the new square.  (The caller is responsible for adding
        // it to the canvas; this method only constructs it.)
        SurfaceSquare ret = new SurfaceSquare(
            newCenter, newNorth, newUp,
            old.sizeKm,
            newLatitude,
            newLongitude,
            old /*base*/,
            midPoint,
            rotation);
        return ret;
    }

    /** Add a square adjacent to 'old', positioned at the given latitude
      * and longitude, with orientation changed by 'rotation'.  If we
      * did not move, this returns the old square. */
    private SurfaceSquare addRotatedAdjacentSquare(
        SurfaceSquare old,
        float newLatitude,
        float newLongitude,
        Vector3f rotation)
    {
        SurfaceSquare ret = this.createRotatedAdjacentSquare(
            old, newLatitude, newLongitude, rotation);
        if (ret == null) {
            return old;      // Did not move.
        }
        else {
            this.emCanvas.addSurfaceSquare(ret);
        }
        return ret;
    }

    /** Get star observations for the given location, at the particular
      * point in time that I am using for everything. */
    private List<StarObservation> getStarObservationsFor(
        float latitude, float longitude)
    {
        return this.worldObservations.getStarObservations(
            StarObservation.unixTimeOfManualData, latitude, longitude);
    }

    /** Add to 'square.starObs' all entries of 'starObs' that have
      * the same latitude and longitude, and also are at least
      * 20 degrees above the horizon.
     */
    private void addMatchingData(SurfaceSquare square)
    {
        for (StarObservation so :
                 this.getStarObservationsFor(square.latitude, square.longitude)) {
            if (this.qualifyingStarObservation(so)) {
                square.addObservation(so);
            }
        }
    }

    /** Compare star data for 'startSquare' and for the given new
      * latitude and longitude.  Return a rotation vector that will
      * transform the orientation of 'startSquare' to match the
      * best surface for a new square at the new location.  The
      * vector's length is the amount of rotation in degrees.
      *
      * Returns null if there are not enough stars in common. */
    private Vector3f calcRequiredRotation(
        SurfaceSquare startSquare,
        float newLatitude,
        float newLongitude)
    {
        // Set of stars visible at the start and end squares and
        // above 20 degrees above the horizon.
        HashMap<String, Vector3f> startStars =
            getVisibleStars(startSquare.latitude, startSquare.longitude);
        HashMap<String, Vector3f> endStars =
            getVisibleStars(newLatitude, newLongitude);

        // Current best rotation and average difference.
        Vector3f currentRotation = new Vector3f(0,0,0);

        // Iteratively refine the current rotation by computing the
        // average correction rotation and applying it until that
        // correction drops below a certain threshold.
        for (int iterationCount = 0; iterationCount < 1000; iterationCount++) {
            // Accumulate the vector sum of all the rotation difference
            // vectors as well as the max length.
            Vector3f diffSum = new Vector3f(0,0,0);
            float maxDiffLength = 0;
            int diffCount = 0;

            for (HashMap.Entry<String, Vector3f> e : startStars.entrySet()) {
                String starName = e.getKey();
                Vector3f startVector = e.getValue();

                Vector3f endVector = endStars.get(starName);
                if (endVector == null) {
                    // Star not visible at both locations; skip it.
                    continue;
                }

                // Both vectors must first be rotated the way the start
                // surface was rotated since its creation so that when
                // we compute the final required rotation, it can be
                // applied to the start surface in its existing orientation,
                // not the nominal orientation that the star vectors have
                // before I do this.
                startVector = startVector.rotateAADeg(startSquare.rotationFromNominal);
                endVector = endVector.rotateAADeg(startSquare.rotationFromNominal);

                // Calculate a difference rotation vector from the
                // rotated end vector to the start vector.  Rotating
                // the end star in one direction is like rotating
                // the start terrain in the opposite direction.
                Vector3f rot = endVector.rotateAADeg(currentRotation)
                                        .rotationToBecome(startVector);

                // Accumulate it.
                diffSum = diffSum.plus(rot);
                maxDiffLength = (float)Math.max(maxDiffLength, rot.length());
                diffCount++;
            }

            if (diffCount < 2) {
                log("reqRot: not enough stars");
                return null;
            }

            // Calculate the average correction rotation.
            Vector3f avgDiff = diffSum.times(1.0f / diffCount);

            // If the correction angle is small enough, stop.  For any set
            // of observations, we should be able to drive the average
            // difference arbitrarily close to zero (this is like finding
            // the centroid, except in spherical rather than flat space).
            // The real question is whether the *maximum* difference is
            // large enough to indicate that the data is inconsistent.
            if (avgDiff.length() < 0.001) {
                log("reqRot finished: iters="+iterationCount+
                    " avgDiffLen="+avgDiff.length()+
                    " maxDiffLength="+maxDiffLength+
                    " diffCount="+diffCount);
                if (maxDiffLength > 0.2) {
                    // For the data I am working with, I estimate it is
                    // accurate to within 0.2 degrees.  Consequently,
                    // there should not be a max difference that large.
                    log("reqRot: WARNING: maxDiffLength greater than 0.2");
                }
                return currentRotation;
            }

            // Otherwise, apply it to the current rotation and
            // iterate again.
            currentRotation = currentRotation.plus(avgDiff);
        }

        log("reqRot: hit iteration limit!");
        return currentRotation;
    }

    /** True if the given observation is available for use, meaning
      * it is high enough in the sky, is enabled, and not obscured
      * by light from the Sun.
*/ private boolean qualifyingStarObservation(StarObservation so) { if (this.sunIsTooHigh(so.latitude, so.longitude)) { return false; } return so.elevation >= 20.0f && this.enabledStars.containsKey(so.name) && this.enabledStars.get(so.name) == true; } /** Return true if, at StarObservation.unixTimeOfManualData, the * Sun is too high in the sky to see stars. This depends on * the configurable parameter 'maximumSunElevation'. */ private boolean sunIsTooHigh(float latitude, float longitude) { if (!this.useSunElevation) { return false; } StarObservation sun = this.worldObservations.getSunObservation( StarObservation.unixTimeOfManualData, latitude, longitude); if (sun == null) { return false; } return sun.elevation > this.maximumSunElevation; } /** For every visible star vislble at the specified coordinate * that has an elevation of at least 20 degrees, * add it to a map from star name to azEl vector. */ private HashMap<String, Vector3f> getVisibleStars( float latitude, float longitude) { HashMap<String, Vector3f> ret = new HashMap<String, Vector3f>(); for (StarObservation so : this.getStarObservationsFor(latitude, longitude)) { if (this.qualifyingStarObservation(so)) { ret.put(so.name, Vector3f.azimuthElevationToVector(so.azimuth, so.elevation)); } } return ret; } /** Get the unit ray, in world coordinates, from the center of 'square' to * the star recorded in 'so', which was observed at this square. */ public static Vector3f rayToStar(SurfaceSquare square, StarObservation so) { // Ray to star in nominal, -Z facing, coordinates. Vector3f nominalRay = Vector3f.azimuthElevationToVector(so.azimuth, so.elevation); // Ray to star in world coordinates, taking into account // how the surface is rotated. Vector3f worldRay = nominalRay.rotateAADeg(square.rotationFromNominal); return worldRay; } /** Hold results of call to 'fitOfObservations'. 
     */
    private static class ObservationStats {
        /** The total variance in star observation locations from the
          * indicated square to the observations of its base square as the
          * average square of the deviation angles.
          *
          * The reason for using a sum of squares approach is to penalize large
          * deviations and to ensure there is a unique "least deviated" point
          * (which need not exist when using a simple sum).  The reason for using
          * the average is to make it easier to judge "good" or "bad" fits,
          * regardless of the number of star observations in common.
          *
          * I use the term "variance" here because it is similar to the idea in
          * statistics, except here we are measuring differences between pairs of
          * observations, rather than between individual observations and the mean
          * of the set.  I'll then reserve "deviation", if I use it, to refer to
          * the square root of the variance, by analogy with "standard deviation".
          */
        public double variance;

        /** Maximum separation between observations, in degrees. */
        public double maxSeparation;

        /** Number of pairs of stars used in comparison. */
        public int numSamples;
    }

    /** Calculate variance and maximum separation for 'square'.  Returns
      * null if there is no base or there are no observations in common. */
    private ObservationStats fitOfObservations(SurfaceSquare square)
    {
        if (square.baseSquare == null) {
            return null;
        }

        double sumOfSquares = 0;
        int numSamples = 0;
        double maxSeparation = 0;

        for (Map.Entry<String, StarObservation> entry : square.starObs.entrySet()) {
            StarObservation so = entry.getValue();

            // Ray to star in world coordinates.
            Vector3f starRay = EarthShape.rayToStar(square, so);

            // Calculate the deviation of this observation from that of
            // the base square.
            StarObservation baseObservation = square.baseSquare.findObservation(so.name);
            if (baseObservation != null) {
                // Get ray from base square to the base observation star
                // in world coordinates.
                Vector3f baseStarRay = EarthShape.rayToStar(square.baseSquare, baseObservation);

                // Visual separation angle between these rays.
                double sep;
                if (this.assumeInfiniteStarDistance) {
                    sep = this.getStarRayDifference(
                        square.up, starRay, baseStarRay);
                }
                else {
                    sep = EarthShape.getModifiedClosestApproach(
                        square.center, starRay,
                        square.baseSquare.center, baseStarRay).separationAngleDegrees;
                }
                if (sep > maxSeparation) {
                    maxSeparation = sep;
                }

                // Accumulate its square.
                sumOfSquares += sep * sep;
                numSamples++;
            }
        }

        if (numSamples == 0) {
            return null;
        }
        else {
            ObservationStats ret = new ObservationStats();
            ret.variance = sumOfSquares / numSamples;
            ret.maxSeparation = maxSeparation;
            ret.numSamples = numSamples;
            return ret;
        }
    }

    /** Get closest approach, except with a modification to
      * smooth out the search space. */
    public static Vector3d.ClosestApproach getModifiedClosestApproach(
        Vector3f p1f, Vector3f u1f,
        Vector3f p2f, Vector3f u2f)
    {
        // Do the closest-approach math in double precision.
        Vector3d p1 = new Vector3d(p1f);
        Vector3d u1 = new Vector3d(u1f);
        Vector3d p2 = new Vector3d(p2f);
        Vector3d u2 = new Vector3d(u2f);

        Vector3d.ClosestApproach ca = Vector3d.getClosestApproach(p1, u1, p2, u2);

        if (ca.line1Closest != null) {
            // Now, there is a problem if the closest approach is behind
            // either observer.  Not only does that not make logical sense,
            // but naively using the calculation will cause the search
            // space to be very lumpy, which creates local minima that my
            // hill-climbing algorithm gets trapped in.  So, we require
            // that the points on each observation line be at least one
            // unit away, which currently means 1000 km.  That smooths out
            // the search space so the hill climber will find its way to
            // the optimal solution more reliably.

            // How far along u1 is the closest approach?
            double m1 = ca.line1Closest.minus(p1).dot(u1);
            if (m1 < 1.0) {
                // That is unreasonably close.  Push the approach point
                // out to one unit away along u1.
                ca.line1Closest = p1.plus(u1);

                // Find the closest point on (p2,u2) to that point.
                ca.line2Closest = ca.line1Closest.closestPointOnLine(p2, u2);

                // Recalculate the separation angle to that point.
                ca.separationAngleDegrees = u1.separationAngleDegrees(ca.line2Closest.minus(p1));
            }

            // How far along u2?
            double m2 = ca.line2Closest.minus(p2).dot(u2);
            if (m2 < 1.0) {
                // Too close; push it.
                ca.line2Closest = p2.plus(u2);

                // What is closest on (p1,u1) to that?
                ca.line1Closest = ca.line2Closest.closestPointOnLine(p1, u1);

                // Re-check if that is too close to p1.
                if (ca.line1Closest.minus(p1).dot(u1) < 1.0) {
                    // Push it without changing line2Closest.
                    ca.line1Closest = p1.plus(u1);
                }

                // Recalculate the separation angle to that point.
                ca.separationAngleDegrees = u1.separationAngleDegrees(ca.line2Closest.minus(p1));
            }
        }

        return ca;
    }

    /** Get the difference between the two star rays, for a location
      * with given unit 'up' vector, in degrees.  This depends on the
      * option setting 'onlyCompareElevations'. */
    public double getStarRayDifference(
        Vector3f up,
        Vector3f ray1,
        Vector3f ray2)
    {
        if (this.onlyCompareElevations) {
            return EarthShape.getElevationDifference(up, ray1, ray2);
        }
        else {
            return ray1.separationAngleDegrees(ray2);
        }
    }

    /** Given two star observation rays at a location with the given
      * 'up' unit vector, return the difference in elevation between
      * them, ignoring azimuth, in degrees. */
    private static double getElevationDifference(
        Vector3f up,
        Vector3f ray1,
        Vector3f ray2)
    {
        double e1 = getElevation(up, ray1);
        double e2 = getElevation(up, ray2);
        return Math.abs(e1-e2);
    }

    /** Return the elevation of 'ray' at a location with unit 'up'
      * vector, in degrees. */
    private static double getElevation(Vector3f up, Vector3f ray)
    {
        // Decompose into vertical and horizontal components.
        Vector3f v = ray.projectOntoUnitVector(up);
        Vector3f h = ray.minus(v);

        // Get lengths, with vertical possibly negative if below
        // horizon.
        double vLen = ray.dot(up);
        double hLen = h.length();

        // Calculate corresponding angle.
return FloatUtil.atan2Deg(vLen, hLen); } /** Begin constructing a new surface using star data. This just * places down the initial square to represent a user-specified * latitude and longitude. The square is placed into 3D space * at a fixed location. */ public void startNewSurface() { LatLongDialog d = new LatLongDialog(this, 38, -122); if (d.exec()) { this.startNewSurfaceAt(d.finalLatitude, d.finalLongitude); } } /** Same as 'startNewSurface' but at a specified location. */ public void startNewSurfaceAt(float latitude, float longitude) { log("starting new surface at lat="+latitude+", lng="+longitude); this.clearSurfaceSquares(); this.setActiveSquare(new SurfaceSquare( new Vector3f(0,0,0), // center new Vector3f(0,0,-1), // north new Vector3f(0,1,0), // up INITIAL_SQUARE_SIZE_KM, latitude, longitude, null /*base*/, null /*midpoint*/, new Vector3f(0,0,0))); this.addMatchingData(this.activeSquare); this.emCanvas.addSurfaceSquare(this.activeSquare); this.emCanvas.redrawCanvas(); } /** Get the active square, or null if none. */ public SurfaceSquare getActiveSquare() { return this.activeSquare; } /** Change which square is active, but do not trigger a redraw. */<|fim▁hole|> this.activeSquare.showAsActive = false; } this.activeSquare = sq; if (this.activeSquare != null) { this.activeSquare.showAsActive = true; } } /** Change which square is active. */ public void setActiveSquare(SurfaceSquare sq) { this.setActiveSquareNoRedraw(sq); this.updateAndRedraw(); } /** Add another square to the surface by building one adjacent * to the active square. */ private void buildNextSquare() { if (this.activeSquare == null) { this.errorBox("No square is active."); return; } LatLongDialog d = new LatLongDialog(this, this.activeSquare.latitude, this.activeSquare.longitude + 9); if (d.exec()) { this.buildNextSquareAt(d.finalLatitude, d.finalLongitude); } } /** Same as 'buildNextSquare' except at a specified location. 
     */
    private void buildNextSquareAt(float latitude, float longitude)
    {
        // The new square should draw star rays if the old did.
        boolean drawStarRays = this.activeSquare.drawStarRays;

        // Add it initially with no rotation.  My plan is to add
        // the rotation interactively afterward.
        this.setActiveSquare(
            this.addRotatedAdjacentSquare(this.activeSquare,
                latitude, longitude, new Vector3f(0,0,0)));
        this.activeSquare.drawStarRays = drawStarRays;

        // Reset the rotation angle after adding a square.
        this.adjustOrientationDegrees = DEFAULT_ADJUST_ORIENTATION_DEGREES;

        this.addMatchingData(this.activeSquare);
        this.emCanvas.redrawCanvas();
    }

    /** If there is an active square, assume we just built it, and now
      * we want to adjust its orientation.  'axis' indicates the axis
      * about which to rotate, relative to the square's current orientation,
      * where -Z is North, +Y is up, and +X is east, and the angle is given
      * by 'this.adjustOrientationDegrees'. */
    private void adjustActiveSquareOrientation(Vector3f axis)
    {
        SurfaceSquare derived = this.activeSquare;
        if (derived == null) {
            this.errorBox("No active square.");
            return;
        }
        SurfaceSquare base = derived.baseSquare;
        if (base == null) {
            this.errorBox("The active square has no base square.");
            return;
        }

        // Replace the active square.
        this.setActiveSquare(
            this.adjustDerivedSquareOrientation(axis, derived, this.adjustOrientationDegrees));

        this.emCanvas.redrawCanvas();
    }

    /** Adjust the orientation of 'derived' by 'adjustDegrees' around
      * 'axis', where 'axis' is relative to the square's current
      * orientation. */
    private SurfaceSquare adjustDerivedSquareOrientation(Vector3f axis,
        SurfaceSquare derived, float adjustDegrees)
    {
        SurfaceSquare base = derived.baseSquare;

        // Rotate by 'adjustOrientationDegrees'.
        Vector3f angleAxis = axis.times(adjustDegrees);

        // Rotate the axis to align it with the square.
        angleAxis = angleAxis.rotateAADeg(derived.rotationFromNominal);

        // Now add that to the square's existing rotation relative
        // to its base square.  (Composition order matters here.)
        angleAxis = Vector3f.composeRotations(derived.rotationFromBase, angleAxis);

        // Now, replace it.
        return this.replaceWithNewRotation(base, derived, angleAxis);
    }

    /** Replace the square 'derived', with a new square that
      * is computed from 'base' by applying 'newRotation'.
      * Return the new square. */
    public SurfaceSquare replaceWithNewRotation(
        SurfaceSquare base, SurfaceSquare derived, Vector3f newRotation)
    {
        // Replace the derived square with a new one created by
        // rotating from the same base by this new amount.
        this.emCanvas.removeSurfaceSquare(derived);
        SurfaceSquare ret =
            this.addRotatedAdjacentSquare(base,
                derived.latitude, derived.longitude, newRotation);

        // Copy some other data from the derived square that we
        // are in the process of discarding.
        ret.drawStarRays = derived.drawStarRays;
        ret.starObs = derived.starObs;

        return ret;
    }

    /** Calculate what the variation of observations would be for
      * 'derived' if its orientation were adjusted by
      * 'angleAxis.degrees()' around 'angleAxis'.  Returns null if
      * the calculation cannot be done because of missing information. */
    private ObservationStats fitOfAdjustedSquare(
        SurfaceSquare derived, Vector3f angleAxis)
    {
        // This part mirrors 'adjustActiveSquareOrientation'.
        SurfaceSquare base = derived.baseSquare;
        if (base == null) {
            return null;
        }
        angleAxis = angleAxis.rotateAADeg(derived.rotationFromNominal);
        angleAxis = Vector3f.composeRotations(derived.rotationFromBase, angleAxis);

        // Now, create a new square with this new rotation.
        SurfaceSquare newSquare =
            this.createRotatedAdjacentSquare(base,
                derived.latitude, derived.longitude, angleAxis);
        if (newSquare == null) {
            // If we do not move, use the original square's data.
            return this.fitOfObservations(derived);
        }

        // Copy the observation data since that is needed to calculate
        // the deviation.
        newSquare.starObs = derived.starObs;

        // Now calculate the new variance.
        return this.fitOfObservations(newSquare);
    }

    /** Like 'fitOfAdjustedSquare' except only retrieves the
      * variance.
This returns 40000 if the data is unavailable. */ private double varianceOfAdjustedSquare( SurfaceSquare derived, Vector3f angleAxis) { ObservationStats os = this.fitOfAdjustedSquare(derived, angleAxis); if (os == null) { // The variance should never be greater than 180 squared, // since that would be the worst possible fit for a star. return 40000; } else { return os.variance; } } /** Change 'adjustOrientationDegrees' by the given multiplier. */ private void changeAdjustOrientationDegrees(float multiplier) { this.adjustOrientationDegrees *= multiplier; if (this.adjustOrientationDegrees < MINIMUM_ADJUST_ORIENTATION_DEGREES) { this.adjustOrientationDegrees = MINIMUM_ADJUST_ORIENTATION_DEGREES; } this.updateUIState(); } /** Compute and apply a single step rotation command to improve * the variance of the active square. */ private void applyRecommendedRotationCommand() { SurfaceSquare s = this.activeSquare; if (s == null) { this.errorBox("No active square."); return; } ObservationStats ostats = this.fitOfObservations(s); if (ostats == null) { this.errorBox("Not enough observational data available."); return; } if (ostats.variance == 0) { this.errorBox("Orientation is already optimal."); return; } // Get the recommended rotation. VarianceAfterRotations var = this.getVarianceAfterRotations(s, this.adjustOrientationDegrees); if (var.bestRC == null) { if (this.adjustOrientationDegrees <= MINIMUM_ADJUST_ORIENTATION_DEGREES) { this.errorBox("Cannot further improve orientation."); return; } else { this.changeAdjustOrientationDegrees(0.5f); } } else { this.adjustActiveSquareOrientation(var.bestRC.axis); } this.updateAndRedraw(); } /** Apply the recommended rotation to 's' until convergence. Return * the improved square, or null if that is not possible due to * insufficient constraints. 
*/
private SurfaceSquare repeatedlyApplyRecommendedRotationCommand(SurfaceSquare s)
{
    ObservationStats ostats = this.fitOfObservations(s);
    if (ostats == null || ostats.numSamples < 2) {
        return null;       // Underconstrained.
    }
    if (ostats.variance == 0) {
        return s;          // Already optimal.
    }

    // Rotation amount. This will be gradually reduced.
    float adjustDegrees = 1.0f;

    // Iteration cap for safety.
    int iters = 0;

    // Iterate until the adjust amount is too small.  Each pass either
    // applies the best improving rotation at the current step size, or
    // halves the step size when no rotation improves the fit.
    while (adjustDegrees > MINIMUM_ADJUST_ORIENTATION_DEGREES) {
        // Get the recommended rotation.
        VarianceAfterRotations var = this.getVarianceAfterRotations(s, adjustDegrees);
        if (var == null) {
            return null;
        }
        if (var.underconstrained) {
            // The orientation is not uniquely determined; stop here
            // rather than wander around a flat solution space.
            log("repeatedlyApply: solution is underconstrained, adjustDegrees="+
                adjustDegrees);
            return s;
        }
        if (var.bestRC == null) {
            adjustDegrees = adjustDegrees * 0.5f;

            // Set the UI adjust degrees to what we came up with here so I
            // can easily see where it ended up.
            this.adjustOrientationDegrees = adjustDegrees;
        }
        else {
            // Note: this replaces the square, so 's' is rebound.
            s = this.adjustDerivedSquareOrientation(var.bestRC.axis, s, adjustDegrees);
        }

        if (++iters > 1000) {
            log("repeatedlyApply: exceeded iteration cap!");
            break;
        }
    }

    // Get the final variance, for logging only.
    String finalVariance = "null";
    ostats = this.fitOfObservations(s);
    if (ostats != null) {
        finalVariance = ""+ostats.variance;
    }

    log("repeatedlyApply done: iters="+iters+
        " adj="+adjustDegrees+
        " var="+finalVariance);

    return s;
}

/** Delete the active square, leaving no square active. */
private void deleteActiveSquare()
{
    if (this.activeSquare == null) {
        this.errorBox("No active square.");
        return;
    }

    this.emCanvas.removeSurfaceSquare(this.activeSquare);
    this.setActiveSquare(null);
}

/** Calculate and apply the optimal orientation for the active square;
  * make its replacement active if we do replace it.
*/
private void automaticallyOrientActiveSquare()
{
    SurfaceSquare derived = this.activeSquare;
    if (derived == null) {
        this.errorBox("No active square.");
        return;
    }
    SurfaceSquare base = derived.baseSquare;
    if (base == null) {
        this.errorBox("The active square has no base square.");
        return;
    }

    SurfaceSquare newDerived = automaticallyOrientSquare(derived);
    if (newDerived == null) {
        this.errorBox("Insufficient observations to determine proper orientation.");
    }
    else {
        this.setActiveSquare(newDerived);
    }

    this.updateAndRedraw();
}

/** Given a square 'derived' that is known to have a base square,
  * adjust and/or replace it with one with a better orientation,
  * and return the improved square.  Returns null if improvement
  * is not possible due to insufficient observational data.
  *
  * Dispatches between the iterative algorithm and the older
  * closed-form calculation, depending on the UI toggle. */
private SurfaceSquare automaticallyOrientSquare(SurfaceSquare derived)
{
    if (this.newAutomaticOrientationAlgorithm) {
        return this.repeatedlyApplyRecommendedRotationCommand(derived);
    }
    else {
        // Calculate the best rotation.
        Vector3f rot = calcRequiredRotation(derived.baseSquare,
            derived.latitude, derived.longitude);
        if (rot == null) {
            return null;
        }

        // Now, replace the active square.
        return this.replaceWithNewRotation(derived.baseSquare, derived, rot);
    }
}

/** Make the next square in 'emCanvas.surfaceSquares' active.
  * 'forward' selects the direction of traversal. */
private void selectNextSquare(boolean forward)
{
    this.setActiveSquare(this.emCanvas.getNextSquare(this.activeSquare, forward));
}

/** Build a square offset from the active square, set its orientation,
  * and make it active. If we cannot make a new square, report that as
  * an error and leave the active square alone.
*/ private void createAndAutomaticallyOrientActiveSquare( float deltaLatitude, float deltaLongitude) { SurfaceSquare base = this.activeSquare; if (base == null) { this.errorBox("There is no active square."); return; } SurfaceSquare newSquare = this.createAndAutomaticallyOrientSquare( base, base.latitude + deltaLatitude, base.longitude + deltaLongitude); if (newSquare == null) { ModalDialog.errorBox(this, "Cannot place new square since observational data does not uniquely determine its orientation."); } else { newSquare.drawStarRays = base.drawStarRays; this.setActiveSquare(newSquare); } } /** Build a square adjacent to the base square, set its orientation, * and return it. Returns null and adds nothing if such a square * cannot be uniquely oriented. */ private SurfaceSquare createAndAutomaticallyOrientSquare(SurfaceSquare base, float newLatitude, float newLongitude) { // Make a new adjacent square, initially with the same orientation // as the base square. SurfaceSquare newSquare = this.addRotatedAdjacentSquare(base, newLatitude, newLongitude, new Vector3f(0,0,0)); if (base == newSquare) { return base; // Did not move, no new square created. } this.addMatchingData(newSquare); // Now try to set its orientation to match observations. SurfaceSquare adjustedSquare = this.automaticallyOrientSquare(newSquare); if (adjustedSquare == null) { // No unique solution; remove the new square too. this.emCanvas.removeSurfaceSquare(newSquare); } return adjustedSquare; } /** Show the user what the local rotation space looks like by. * considering the effect of rotating various amounts on each axis. */ private void analyzeSolutionSpace() { if (this.activeSquare == null) { this.errorBox("No active square."); return; } SurfaceSquare s = this.activeSquare; ObservationStats ostats = this.fitOfObservations(s); if (ostats == null) { this.errorBox("No observation fitness stats for the active square."); return; } // Prepare a task object in which to run the analysis. 
AnalysisTask task = new AnalysisTask(this, s); // Show a progress dialog while this run. ProgressDialog<PlotData3D, Void> progressDialog = new ProgressDialog<PlotData3D, Void>(this, "Analyzing rotations of active square...", task); // Run the dialog and the task. if (progressDialog.exec()) { // Retrieve the computed data. PlotData3D rollPitchYawPlotData; try { rollPitchYawPlotData = task.get(); } catch (Exception e) { String msg = "Internal error: solution space analysis failed: "+e.getMessage(); log(msg); e.printStackTrace(); this.errorBox(msg); return; } // Plot results. RotationCubeDialog d = new RotationCubeDialog(this, (float)ostats.variance, rollPitchYawPlotData); d.exec(); } else { log("Analysis canceled."); } } /** Task to analyze the solution space near a square, which can take a * while if 'solutionAnalysisPointsPerSide' is high. */ private static class AnalysisTask extends MySwingWorker<PlotData3D, Void> { /** Enclosing EarthShape instance. */ private EarthShape earthShape; /** Square whose solution will be analyzed. */ private SurfaceSquare square; public AnalysisTask( EarthShape earthShape_, SurfaceSquare square_) { this.earthShape = earthShape_; this.square = square_; } @Override protected PlotData3D doTask() throws Exception { return this.earthShape.getThreeRotationAxisPlotData(this.square, this); } } /** Get data for various rotation angles of all three axes. * * This runs in a worker thread. However, I haven't bothered * to synchronize access since the user shouldn't be able to * do anything while this is happening (although they can...), * and most of the shared data is immutable. */ private PlotData3D getThreeRotationAxisPlotData(SurfaceSquare s, AnalysisTask task) { // Number of data points on each side of 0. int pointsPerSide = this.solutionAnalysisPointsPerSide; // Total number of data points per axis, including 0. int pointsPerAxis = pointsPerSide * 2 + 1; // Complete search space cube. 
float[] wData = new float[pointsPerAxis * pointsPerAxis * pointsPerAxis]; Vector3f xAxis = new Vector3f(0, 0, -1); // Roll Vector3f yAxis = new Vector3f(1, 0, 0); // Pitch Vector3f zAxis = new Vector3f(0, -1, 0); // Yaw float xFirst = -pointsPerSide * this.adjustOrientationDegrees; float xLast = pointsPerSide * this.adjustOrientationDegrees; float yFirst = -pointsPerSide * this.adjustOrientationDegrees; float yLast = pointsPerSide * this.adjustOrientationDegrees; float zFirst = -pointsPerSide * this.adjustOrientationDegrees; float zLast = pointsPerSide * this.adjustOrientationDegrees; for (int zIndex=0; zIndex < pointsPerAxis; zIndex++) { if (task.isCancelled()) { log("analysis canceled"); return null; // Bail out. } task.setProgressFraction(zIndex / (float)pointsPerAxis); task.setStatus("Analyzing plane "+(zIndex+1)+" of "+pointsPerAxis); for (int yIndex=0; yIndex < pointsPerAxis; yIndex++) { for (int xIndex=0; xIndex < pointsPerAxis; xIndex++) { // Compose rotations about each axis: X then Y then Z. // // Note: Rotations don't commute, so the axes are not // being treated perfectly symmetrically here, but this // is still good for showing overall shape, and when // we zoom in to small angles, the non-commutativity // becomes insignificant. Vector3f rotX = xAxis.times(this.adjustOrientationDegrees * (xIndex - pointsPerSide)); Vector3f rotY = yAxis.times(this.adjustOrientationDegrees * (yIndex - pointsPerSide)); Vector3f rotZ = zAxis.times(this.adjustOrientationDegrees * (zIndex - pointsPerSide)); Vector3f rot = Vector3f.composeRotations( Vector3f.composeRotations(rotX, rotY), rotZ); // Get variance after that adjustment. 
wData[xIndex + pointsPerAxis * yIndex + pointsPerAxis * pointsPerAxis * zIndex] = (float)this.varianceOfAdjustedSquare(s, rot); } } } return new PlotData3D( xFirst, xLast, yFirst, yLast, zFirst, zLast, pointsPerAxis /*xValuesPerRow*/, pointsPerAxis /*yValuesPerColumn*/, wData); } /** Show a dialog to let the user change * 'solutionAnalysisPointsPerSide'. */ private void setSolutionAnalysisResolution() { String choice = JOptionPane.showInputDialog(this, "Specify number of data points on each side of 0 to sample "+ "when performing a solution analysis", (Integer)this.solutionAnalysisPointsPerSide); if (choice != null) { try { int c = Integer.valueOf(choice); if (c < 1) { this.errorBox("The minimum number of points is 1."); } else if (c > 100) { // At 100, it will take about a minute to complete. this.errorBox("The maximum number of points is 100."); } else { this.solutionAnalysisPointsPerSide = c; } } catch (NumberFormatException e) { this.errorBox("Invalid integer syntax: "+e.getMessage()); } } } /** Prompt the user for a floating-point value. Returns null if the * user cancels or enters an invalid value. In the latter case, an * error box has already been shown. */ private Float floatInputDialog(String label, float curValue) { String choice = JOptionPane.showInputDialog(this, label, (Float)curValue); if (choice != null) { try { return Float.valueOf(choice); } catch (NumberFormatException e) { this.errorBox("Invalid float syntax: "+e.getMessage()); return null; } } else { return null; } } /** Let the user specify a new maximum Sun elevation. */ private void setMaximumSunElevation() { Float newValue = this.floatInputDialog( "Specify maximum elevation of the Sun in degrees "+ "above the horizon (otherwise, stars are not visible)", this.maximumSunElevation); if (newValue != null) { this.maximumSunElevation = newValue; } } /** Let the user specify the distance to the skybox. 
*/ private void setSkyboxDistance() { Float newValue = this.floatInputDialog( "Specify distance in 3D space units (each of which usually "+ "represents 1000km of surface) to the skybox.\n"+ "A value of 0 means the skybox is infinitely far away.", this.emCanvas.skyboxDistance); if (newValue != null) { if (newValue < 0) { this.errorBox("The skybox distance must be non-negative."); } else { this.emCanvas.skyboxDistance = newValue; this.updateAndRedraw(); } } } /** Make this window visible. */ public void makeVisible() { this.setVisible(true); // It seems I can only set the focus once the window is // visible. There is an example in the focus tutorial of // calling pack() first, but that resizes the window and // I don't want that. this.emCanvas.setFocusOnCanvas(); } public static void main(String[] args) { SwingUtilities.invokeLater(new Runnable() { public void run() { (new EarthShape()).makeVisible(); } }); } public void toggleDrawAxes() { this.emCanvas.drawAxes = !this.emCanvas.drawAxes; this.updateAndRedraw(); } public void toggleDrawCrosshair() { this.emCanvas.drawCrosshair = !this.emCanvas.drawCrosshair; this.updateAndRedraw(); } /** Toggle the 'drawWireframeSquares' flag. */ public void toggleDrawWireframeSquares() { this.emCanvas.drawWireframeSquares = !this.emCanvas.drawWireframeSquares; this.updateAndRedraw(); } /** Toggle the 'drawCompasses' flag, then update state and redraw. */ public void toggleDrawCompasses() { log("toggleDrawCompasses"); // The compass flag is ignored when wireframe is true, so if // we are toggling compass, also clear wireframe. this.emCanvas.drawWireframeSquares = false; this.emCanvas.drawCompasses = !this.emCanvas.drawCompasses; this.updateAndRedraw(); } /** Toggle the 'drawSurfaceNormals' flag. */ public void toggleDrawSurfaceNormals() { this.emCanvas.drawSurfaceNormals = !this.emCanvas.drawSurfaceNormals; this.updateAndRedraw(); } /** Toggle the 'drawCelestialNorth' flag. 
*/ public void toggleDrawCelestialNorth() { this.emCanvas.drawCelestialNorth = !this.emCanvas.drawCelestialNorth; this.updateAndRedraw(); } /** Toggle the 'drawStarRays' flag. */ public void toggleDrawStarRays() { if (this.activeSquare == null) { this.errorBox("No square is active"); } else { this.activeSquare.drawStarRays = !this.activeSquare.drawStarRays; } this.emCanvas.redrawCanvas(); } private void toggleDrawUnitStarRays() { this.emCanvas.drawUnitStarRays = !this.emCanvas.drawUnitStarRays; this.updateAndRedraw(); } private void toggleDrawBaseSquareStarRays() { this.emCanvas.drawBaseSquareStarRays = !this.emCanvas.drawBaseSquareStarRays; this.updateAndRedraw(); } private void toggleDrawTravelPath() { this.emCanvas.drawTravelPath = !this.emCanvas.drawTravelPath; this.updateAndRedraw(); } private void toggleDrawActiveSquareAtOrigin() { this.emCanvas.drawActiveSquareAtOrigin = !this.emCanvas.drawActiveSquareAtOrigin; this.updateAndRedraw(); } private void toggleDrawWorldWireframe() { this.emCanvas.drawWorldWireframe = !this.emCanvas.drawWorldWireframe; this.updateAndRedraw(); } private void toggleDrawWorldStars() { this.emCanvas.drawWorldStars = !this.emCanvas.drawWorldStars; this.updateAndRedraw(); } private void toggleDrawSkybox() { this.emCanvas.drawSkybox = !this.emCanvas.drawSkybox; this.updateAndRedraw(); } private void turnOffAllStarRays() { this.emCanvas.turnOffAllStarRays(); this.emCanvas.redrawCanvas(); } /** Move the camera to the center of the active square. */ private void goToActiveSquareCenter() { if (this.activeSquare == null) { this.errorBox("No active square."); } else { this.moveCamera(this.activeSquare.center); } } /** Place the camera at the specified location. */ private void moveCamera(Vector3f loc) { this.emCanvas.cameraPosition = loc; this.updateAndRedraw(); } /** Update all stateful UI elements. 
*/ public void updateUIState() { this.setStatusLabel(); this.setMenuState(); this.setInfoPanel(); } /** Set the status label text to reflect other state variables. * This also updates the state of stateful menu items. */ private void setStatusLabel() { StringBuilder sb = new StringBuilder(); sb.append(this.emCanvas.getStatusString()); sb.append(", model="+this.worldObservations.getDescription()); this.statusLabel.setText(sb.toString()); } /** Set the state of stateful menu items. */ private void setMenuState() { this.drawCoordinateAxesCBItem.setSelected(this.emCanvas.drawAxes); this.drawCrosshairCBItem.setSelected(this.emCanvas.drawCrosshair); this.drawWireframeSquaresCBItem.setSelected(this.emCanvas.drawWireframeSquares); this.drawCompassesCBItem.setSelected(this.emCanvas.drawCompasses); this.drawSurfaceNormalsCBItem.setSelected(this.emCanvas.drawSurfaceNormals); this.drawCelestialNorthCBItem.setSelected(this.emCanvas.drawCelestialNorth); this.drawStarRaysCBItem.setSelected(this.activeSquareDrawsStarRays()); this.drawUnitStarRaysCBItem.setSelected(this.emCanvas.drawUnitStarRays); this.drawBaseSquareStarRaysCBItem.setSelected(this.emCanvas.drawBaseSquareStarRays); this.drawTravelPathCBItem.setSelected(this.emCanvas.drawTravelPath); this.drawActiveSquareAtOriginCBItem.setSelected(this.emCanvas.drawActiveSquareAtOrigin); this.drawWorldWireframeCBItem.setSelected(this.emCanvas.drawWorldWireframe); this.drawWorldStarsCBItem.setSelected(this.emCanvas.drawWorldStars); this.drawSkyboxCBItem.setSelected(this.emCanvas.drawSkybox); this.useSunElevationCBItem.setSelected(this.useSunElevation); this.invertHorizontalCameraMovementCBItem.setSelected( this.emCanvas.invertHorizontalCameraMovement); this.invertVerticalCameraMovementCBItem.setSelected( this.emCanvas.invertVerticalCameraMovement); this.newAutomaticOrientationAlgorithmCBItem.setSelected( this.newAutomaticOrientationAlgorithm); this.assumeInfiniteStarDistanceCBItem.setSelected( this.assumeInfiniteStarDistance); 
this.onlyCompareElevationsCBItem.setSelected( this.onlyCompareElevations); } /** Update the contents of the info panel. */ private void setInfoPanel() { StringBuilder sb = new StringBuilder(); if (this.emCanvas.activeSquareAnimationState != 0) { sb.append("Animation state: "+this.emCanvas.activeSquareAnimationState+"\n"); } if (this.activeSquare == null) { sb.append("No active square.\n"); } else { sb.append("Active square:\n"); SurfaceSquare s = this.activeSquare; sb.append(" lat/lng: ("+s.latitude+","+s.longitude+")\n"); sb.append(" pos: "+s.center+"\n"); sb.append(" rot: "+s.rotationFromNominal+"\n"); ObservationStats ostats = this.fitOfObservations(s); if (ostats == null) { sb.append(" No obs stats\n"); } else { sb.append(" maxSep: "+ostats.maxSeparation+"\n"); sb.append(" sqrtVar: "+(float)Math.sqrt(ostats.variance)+"\n"); sb.append(" var: "+ostats.variance+"\n"); // What do we recommend to improve the variance? If it is // already zero, nothing. Otherwise, start by thinking we // should decrease the rotation angle. char recommendation = (ostats.variance == 0? ' ' : '-'); // What is the best rotation command, and what does it achieve? VarianceAfterRotations var = this.getVarianceAfterRotations(s, this.adjustOrientationDegrees); // Print the effects of all the available rotations. sb.append("\n"); for (RotationCommand rc : RotationCommand.values()) { sb.append(" adj("+rc.key+"): "); Double newVariance = var.rcToVariance.get(rc); if (newVariance == null) { sb.append("(none)\n"); } else { sb.append(""+newVariance+"\n"); } } // Make a final recommendation. if (var.bestRC != null) { recommendation = var.bestRC.key; } sb.append(" recommend: "+recommendation+"\n"); } } sb.append("\n"); sb.append("adjDeg: "+this.adjustOrientationDegrees+"\n"); // Compute average curvature from base. 
if (this.activeSquare != null && this.activeSquare.baseSquare != null) { sb.append("\n"); sb.append("Base at: ("+this.activeSquare.baseSquare.latitude+ ","+this.activeSquare.baseSquare.longitude+")\n"); CurvatureCalculator cc = this.computeAverageCurvature(this.activeSquare); double normalCurvatureDegPer1000km = FloatUtil.radiansToDegrees(cc.normalCurvature*1000); sb.append("Normal curvature: "+(float)normalCurvatureDegPer1000km+" deg per 1000 km\n"); if (cc.normalCurvature != 0) { sb.append("Radius: "+(float)(1/cc.normalCurvature)+" km\n"); } else { sb.append("Radius: Infinite\n"); } sb.append("Geodesic curvature: "+(float)(cc.geodesicCurvature*1000.0)+" deg per 1000 km\n"); sb.append("Geodesic torsion: "+(float)(cc.geodesicTorsion*1000.0)+" deg per 1000 km\n"); } // Also show star observation data. if (this.activeSquare != null) { sb.append("\n"); sb.append("Visible stars (az, el):\n"); // Iterate over stars in name order. TreeSet<String> stars = new TreeSet<String>(this.activeSquare.starObs.keySet()); for (String starName : stars) { StarObservation so = this.activeSquare.starObs.get(starName); sb.append(" "+so.name+": "+so.azimuth+", "+so.elevation+"\n"); } } this.infoPanel.setText(sb.toString()); } /** Compute the average curvature on a path from the base square * of 's' to 's'. */ private CurvatureCalculator computeAverageCurvature(SurfaceSquare s) { // Travel distance and heading. TravelObservation tobs = this.worldObservations.getTravelObservation( s.baseSquare.latitude, s.baseSquare.longitude, s.latitude, s.longitude); // Unit travel vector in base square coordinate system. Vector3f startTravel = Vector3f.headingToVector((float)tobs.startToEndHeading); startTravel = startTravel.rotateAADeg(s.baseSquare.rotationFromNominal); // And at derived square. Vector3f endTravel = Vector3f.headingToVector((float)tobs.endToStartHeading + 180); endTravel = endTravel.rotateAADeg(s.rotationFromNominal); // Calculate curvature and twist. 
CurvatureCalculator c = new CurvatureCalculator(); c.distanceKm = tobs.distanceKm; c.computeFromNormals(s.baseSquare.up, s.up, startTravel, endTravel); return c; } /** Result of call to 'getVarianceAfterRotations'. */ private static class VarianceAfterRotations { /** Variance produced by each rotation. The value can be null, * meaning the rotation produces a situation where we can't * measure the variance (e.g., because not enough stars are * above the horizon). */ public HashMap<RotationCommand, Double> rcToVariance = new HashMap<RotationCommand, Double>(); /** Which rotation command produces the greatest improvement * in variance, if any. */ public RotationCommand bestRC = null; /** If true, the solution space is underconstrained, meaning * the best orientation is not unique. */ public boolean underconstrained = false; } /** Perform a trial rotation in each direction and record the * resulting variance, plus a decision about which is best, if any. * This returns null if we do not have enough data to measure * the fitness of the square's orientation. */ private VarianceAfterRotations getVarianceAfterRotations(SurfaceSquare s, float adjustDegrees) { // Get variance if no rotation is performed. We only recommend // a rotation if it improves on this. ObservationStats ostats = this.fitOfObservations(s); if (ostats == null) { return null; } VarianceAfterRotations ret = new VarianceAfterRotations(); // Variance achieved by the best rotation command, if there is one. double bestNewVariance = 0; // Get the effects of all the available rotations. 
// (Body of 'getVarianceAfterRotations', whose head is above this view:
// try each candidate rotation command and record the variance each
// one would produce.)
for (RotationCommand rc : RotationCommand.values()) {
    ObservationStats newStats = this.fitOfAdjustedSquare(s,
        rc.axis.times(adjustDegrees));
    if (newStats == null || newStats.numSamples < 2) {
        // Not enough data after this rotation; record "unmeasurable".
        ret.rcToVariance.put(rc, null);
    }
    else {
        double newVariance = newStats.variance;
        ret.rcToVariance.put(rc, newVariance);

        if (ostats.variance == 0 && newVariance == 0) {
            // The current orientation is ideal, but here
            // is a rotation that keeps it ideal.  That
            // must mean that the solution space is under-
            // constrained.
            //
            // Note: This is an unnecessarily strong condition for
            // being underconstrained.  It requires that we
            // find a zero in the objective function, and
            // furthermore that the solution space be parallel
            // to one of the three local rotation axes.  I have
            // some ideas for more robust detection of underconstraint,
            // but haven't tried to implement them yet.  For now I
            // will rely on manual inspection of the rotation cube
            // analysis dialog.
            ret.underconstrained = true;
        }

        // Track the best strictly-improving rotation.
        if (newVariance < ostats.variance &&
            (ret.bestRC == null || newVariance < bestNewVariance)) {
            ret.bestRC = rc;
            bestNewVariance = newVariance;
        }
    }
}

return ret;
}

/** True if there is an active square and it is drawing star rays. */
private boolean activeSquareDrawsStarRays()
{
    return this.activeSquare != null &&
           this.activeSquare.drawStarRays;
}

/** Replace the current observations with a new source and clear
  * the virtual map. */
private void changeObservations(WorldObservations obs)
{
    this.clearSurfaceSquares();

    this.worldObservations = obs;

    // Enable all stars in the new model.
    this.enabledStars.clear();
    for (String starName : this.worldObservations.getAllStars()) {
        this.enabledStars.put(starName, true);
    }

    this.updateAndRedraw();
}

/** Refresh all the UI elements and the map canvas. */
private void updateAndRedraw()
{
    this.updateUIState();
    this.emCanvas.redrawCanvas();
}

/** Return true if the named star is enabled.
*/
public boolean isStarEnabled(String starName)
{
    // Single null-safe lookup: a missing key yields null, which is not
    // TRUE, so the result matches the old containsKey+get pair without
    // the double lookup or the unboxing-style equals(true) comparison.
    return Boolean.TRUE.equals(this.enabledStars.get(starName));
}

/** Do some initial steps so I do not have to do them manually each
  * time I start the program when I'm working on a certain feature.
  * The exact setup here will vary over time as I work on different
  * things; it is only meant for use while testing or debugging. */
private void doCannedSetup()
{
    // Disable all stars except for Betelgeuse and Dubhe.
    this.enabledStars.clear();
    for (String starName : this.worldObservations.getAllStars()) {
        boolean en = (starName.equals("Betelgeuse") ||
                      starName.equals("Dubhe"));
        this.enabledStars.put(starName, en);
    }

    // Build first square in SF as usual.
    this.startNewSurfaceAt(38, -122);

    // Build next near Washington, DC.
    this.buildNextSquareAt(38, -77);

    // The plan is to align with just two stars, so we need this.
    this.assumeInfiniteStarDistance = true;

    // Position the camera to see DC square.
    if (this.emCanvas.drawActiveSquareAtOrigin) {
        // This is a canned command in the middle of a session.
        // Do not reposition the camera.
    }
    else {
        this.emCanvas.drawActiveSquareAtOrigin = true;
        //this.emCanvas.cameraPosition = new Vector3f(-0.19f, 0.56f, 1.20f);
        //this.emCanvas.cameraAzimuthDegrees = 0.0f;
        //this.emCanvas.cameraPitchDegrees = -11.0f;
        this.emCanvas.cameraPosition = new Vector3f(-0.89f, 0.52f, -1.06f);
        this.emCanvas.cameraAzimuthDegrees = 214.0f;
        this.emCanvas.cameraPitchDegrees = 1.0f;
    }

    // Use wireframes for squares, no world wireframe, but add surface normals.
    this.emCanvas.drawWireframeSquares = true;
    this.emCanvas.drawWorldWireframe = false;
    this.emCanvas.drawSurfaceNormals = true;
    this.emCanvas.drawTravelPath = false;

    // Show its star rays, and those at SF, as unit vectors.
    this.activeSquare.drawStarRays = true;
    this.emCanvas.drawBaseSquareStarRays = true;
    this.emCanvas.drawUnitStarRays = true;

    // Reset the animation.
this.emCanvas.activeSquareAnimationState = 0; this.updateAndRedraw(); } /** Set the animation state either to 0, or to an amount * offset by 's'. */ private void setAnimationState(int s) { if (s == 0) { this.emCanvas.activeSquareAnimationState = 0; } else { this.emCanvas.activeSquareAnimationState += s; } this.updateAndRedraw(); } // ------------------------------- Animation -------------------------------- /** When animation begins, this is the rotation of the active * square relative to its base. */ private Vector3f animatedRotationStartRotation; /** Angle through which to rotate the active square. */ private double animatedRotationAngle; /** Axis about which to rotate the active square. */ private Vector3f animatedRotationAxis; /** Seconds the animation should take to complete. */ private float animatedRotationSeconds; /** How many seconds have elapsed since we started animating. * This is clamped to 'animatedRotationSeconds', and when * it is equal, the animation is complete. */ private float animatedRotationElapsed; /** Start a new rotation animation of the active square by * 'angle' degrees about 'axis' for 'seconds'. */ public void beginAnimatedRotation(double angle, Vector3f axis, float seconds) { if (this.activeSquare != null && this.activeSquare.baseSquare != null) { log("starting rotation animation"); this.animatedRotationStartRotation = this.activeSquare.rotationFromBase; this.animatedRotationAngle = angle; this.animatedRotationAxis = axis; this.animatedRotationSeconds = seconds; this.animatedRotationElapsed = 0; } } /** If animating, advance to the next frame, based on 'deltaSeconds' * having elapsed since the last animated frame. * * This should *not* trigger a redraw, since that will cause this * function to be called again during the same frame. 
*/ public void nextAnimatedRotationFrame(float deltaSeconds) { if (this.animatedRotationElapsed < this.animatedRotationSeconds && this.activeSquare != null && this.activeSquare.baseSquare != null) { this.animatedRotationElapsed = FloatUtil.clampf( this.animatedRotationElapsed + deltaSeconds, 0, this.animatedRotationSeconds); // How much do we want the square to be rotated, // relative to its orientation at the start of // the animation? double rotFraction = this.animatedRotationElapsed / this.animatedRotationSeconds; Vector3f partialRot = this.animatedRotationAxis.timesd( this.animatedRotationAngle * rotFraction); // Compose with the original orientation. Vector3f newRotationFromBase = Vector3f.composeRotations(this.animatedRotationStartRotation, partialRot); SurfaceSquare s = replaceWithNewRotation( this.activeSquare.baseSquare, this.activeSquare, newRotationFromBase); this.setActiveSquareNoRedraw(s); if (this.animatedRotationElapsed >= this.animatedRotationSeconds) { log("rotation animation finished; normal: "+s.up); } } } /** Do any "physics" world updates. This is invoked prior to * rendering a frame in the GL canvas. */ public void updatePhysics(float elapsedSeconds) { this.nextAnimatedRotationFrame(elapsedSeconds); } /** Get list of stars for which both squares have observations. */ private List<String> getCommonStars(SurfaceSquare s1, SurfaceSquare s2) { ArrayList<String> ret = new ArrayList<String>(); for (Map.Entry<String, StarObservation> entry : s1.starObs.entrySet()) { if (s2.findObservation(entry.getKey()) != null) { ret.add(entry.getKey()); } } return ret; } private void showCurvatureDialog() { // Default initial values. CurvatureCalculator c = CurvatureCalculator.getDubheSirius(); // Try to populate 'c' with values from the current square. 
// (Tail of 'showCurvatureDialog', whose head is above this view: if an
// active derived square exists, seed the curvature calculator from its
// shared star observations, then run the dialog.)
if (this.activeSquare != null && this.activeSquare.baseSquare != null) {
    SurfaceSquare start = this.activeSquare.baseSquare;
    SurfaceSquare end = this.activeSquare;
    List<String> common = getCommonStars(start, end);
    if (common.size() >= 2) {
        // When Dubhe and Betelgeuse are the only ones, I want
        // Dubhe first so the calculation agrees with the ad-hoc
        // animation, and doing them in this order accomplishes
        // that.
        String A = common.get(1);
        String B = common.get(0);
        log("initializing curvature dialog with "+A+" and "+B);

        // Copy azimuth/elevation for both stars at both squares.
        // ('findObservation' cannot return null here since both stars
        // came from 'getCommonStars'.)
        c.start_A_az = start.findObservation(A).azimuth;
        c.start_A_el = start.findObservation(A).elevation;
        c.start_B_az = start.findObservation(B).azimuth;
        c.start_B_el = start.findObservation(B).elevation;
        c.end_A_az = end.findObservation(A).azimuth;
        c.end_A_el = end.findObservation(A).elevation;
        c.end_B_az = end.findObservation(B).azimuth;
        c.end_B_el = end.findObservation(B).elevation;
        c.setTravelByLatLong(start.latitude, start.longitude,
            end.latitude, end.longitude);
    }
}

// Run the dialog.
(new CurvatureCalculatorDialog(EarthShape.this, c)).exec();
}

}

// EOF
public void setActiveSquareNoRedraw(SurfaceSquare sq) { if (this.activeSquare != null) {
<|file_name|>inventory.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # Copyright 2017 Northern.tech AS #<|fim▁hole|># You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from MenderAPI import * class Inventory(): auth = None def __init__(self, auth): self.reset() self.auth = auth def reset(self): # Reset all temporary values. pass def get_inv_base_path(self): return "https://%s/api/management/%s/inventory/" % (get_mender_gateway(), api_version) def get_devices(self, has_group=None): """get_devices API. has_group can be True/False/None string.""" headers = self.auth.get_auth_token() params = {} if has_group is not None: params = ({"has_group": has_group}) ret = requests.get(self.get_inv_base_path() + "devices", params=params, headers=self.auth.get_auth_token(), verify=False) assert ret.status_code == requests.status_codes.codes.ok return ret.json() def get_device(self, device_id): headers = self.auth.get_auth_token() devurl = "%s%s/%s" % (self.get_inv_base_path(), "device", device_id) ret = requests.get(devurl, headers=self.auth.get_auth_token(), verify=False) return ret def get_groups(self): ret = requests.get(self.get_inv_base_path() + "groups", headers=self.auth.get_auth_token(), verify=False) assert ret.status_code == requests.status_codes.codes.ok return ret.json() def get_devices_in_group(self, group): req = "groups/%s/devices" % group ret = requests.get(self.get_inv_base_path() + req, headers=self.auth.get_auth_token(), verify=False) assert ret.status_code == requests.status_codes.codes.ok return ret.json() def get_device_group(self, device): req = "devices/%s/group" % device ret = 
requests.get(self.get_inv_base_path() + req, headers=self.auth.get_auth_token(), verify=False) assert ret.status_code == requests.status_codes.codes.ok return ret.json() def put_device_in_group(self, device, group): headers = {"Content-Type": "application/json"} headers.update(self.auth.get_auth_token()) body = '{"group":"%s"}' % group req = "devices/%s/group" % device ret = requests.put(self.get_inv_base_path() + req, data=body, headers=headers, verify=False) assert ret.status_code == requests.status_codes.codes.no_content def delete_device_from_group(self, device, group): req = "devices/%s/group/%s" % (device, group) ret = requests.delete(self.get_inv_base_path() + req, headers=self.auth.get_auth_token(), verify=False) assert ret.status_code == requests.status_codes.codes.no_content<|fim▁end|>
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|># License for the specific language governing permissions and limitations # under the License. def load_test_data(load_onto=None): from openstack_dashboard.test.test_data import ceilometer_data from openstack_dashboard.test.test_data import cinder_data from openstack_dashboard.test.test_data import exceptions from openstack_dashboard.test.test_data import glance_data from openstack_dashboard.test.test_data import heat_data from openstack_dashboard.test.test_data import keystone_data from openstack_dashboard.test.test_data import neutron_data from openstack_dashboard.test.test_data import nova_data from openstack_dashboard.test.test_data import swift_data from openstack_dashboard.test.test_data import trove_data # The order of these loaders matters, some depend on others. loaders = ( exceptions.data, keystone_data.data, glance_data.data, nova_data.data, cinder_data.data, neutron_data.data, swift_data.data, heat_data.data, ceilometer_data.data, trove_data.data, ) if load_onto: for data_func in loaders: data_func(load_onto) return load_onto else: return TestData(*loaders) class TestData(object): """Holder object for test data. Any functions passed to the init method will be called with the ``TestData`` object as their only argument. They can then load data onto the object as desired. The idea is to use the instantiated object like this:: >>> import glance_data >>> TEST = TestData(glance_data.data) >>> TEST.images.list() [<Image: visible_image>, <Image: invisible_image>] >>> TEST.images.first() <Image: visible_image> You can load as little or as much data as you like as long as the loaders don't conflict with each other. 
See the :class:`~openstack_dashboard.test.test_data.utils.TestDataContainer` class for a list of available methods. """ def __init__(self, *args): for data_func in args: data_func(self) class TestDataContainer(object): """A container for test data objects. The behavior of this class is meant to mimic a "manager" class, which has convenient shortcuts for common actions like "list", "filter", "get", and "add". """ def __init__(self): self._objects = [] def add(self, *args): """Add a new object to this container. Generally this method should only be used during data loading, since adding data during a test can affect the results of other tests. """ for obj in args: if obj not in self._objects: self._objects.append(obj) def list(self): """Returns a list of all objects in this container.""" return self._objects def filter(self, filtered=None, **kwargs): """Returns objects in this container whose attributes match the given keyword arguments. """ if filtered is None: filtered = self._objects try: key, value = kwargs.popitem() except KeyError: # We're out of filters, return return filtered def get_match(obj): return hasattr(obj, key) and getattr(obj, key) == value return self.filter(filtered=filter(get_match, filtered), **kwargs) def get(self, **kwargs): """Returns the single object in this container whose attributes match the given keyword arguments. An error will be raised if the arguments provided don't return exactly one match. """ matches = self.filter(**kwargs) if not matches: raise Exception("No matches found.") elif len(matches) > 1: raise Exception("Multiple matches found.") else: return matches.pop() def first(self): """Returns the first object from this container.""" return self._objects[0] def count(self): return len(self._objects)<|fim▁end|>
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate byteorder; extern crate wasm_ast; use byteorder::{ByteOrder, LittleEndian}; use std::collections::HashMap; use std::default::Default; use std::iter::repeat; use wasm_ast::{BinOp, Expr, Function, Size, UnaryOp}; use wasm_ast::BinOp::{Add, And, Copysign, Div, Eq, Ge, Gt, Le, Lt, Max, Min, Mul, Ne}; use wasm_ast::BinOp::{Or, Rem, RotL, RotR, Shl, Shr, Sub, Xor}; use wasm_ast::Const::{F32Const, F64Const, I32Const, I64Const}; use wasm_ast::Expr::{BinOpExpr, CallExpr, ConstExpr, GetLocalExpr, GrowMemoryExpr, IfThenExpr, IfThenElseExpr, LoadExpr, NopExpr, SetLocalExpr, StoreExpr, UnaryOpExpr}; use wasm_ast::Size::{Bits8, Bits16, Bits32, Bits64}; use wasm_ast::SignedTyp::{F32s, F64s, I32s, I64s, U32s, U64s}; use wasm_ast::Typ::{F32, F64, I32, I64}; use wasm_ast::UnaryOp::{Abs, Ceil, Clz, Ctz, Eqz, Floor, Nearest, Neg, Popcnt, Sqrt, Trunc}; trait FunctionTable { fn lookup_function(&self, name: &str) -> &Function; } trait InitialHeap { fn init_heap(&self) -> Vec<u8>; } trait InterpretPrimitives<T> { fn get_raw(&self, _: &Size, _: &[u8]) -> T; fn set_raw(&self, _: &Size, _: &mut[u8], _: T); fn as_raw(&self, value: T) -> [u8;8] { let mut result = [0;8]; self.set_raw(&Bits64, &mut result, value); result } fn type_error(&self) -> T { panic!("Type error.") } fn binop_f32(&self, _: &BinOp, _: f32, _: f32) -> T { self.type_error() } fn binop_f64(&self, _: &BinOp, _: f64, _: f64) -> T { self.type_error() } fn binop_i32(&self, _: &BinOp, _: i32, _: i32) -> T { self.type_error() } fn binop_i64(&self, _: &BinOp, _: i64, _: i64) -> T { self.type_error() } fn binop_u32(&self, _: &BinOp, _: u32, _: u32) -> T { self.type_error() } fn binop_u64(&self, _: &BinOp, _: u64, _: u64) -> T { self.type_error() } fn unop_f32(&self, _: &UnaryOp, _: f32) -> T { self.type_error() } fn unop_f64(&self, _: &UnaryOp, _: f64) -> T { self.type_error() } fn unop_i32(&self, _: &UnaryOp, _: i32) -> T { self.type_error() } fn unop_i64(&self, _: 
&UnaryOp, _: i64) -> T { self.type_error() } fn unop_u32(&self, _: &UnaryOp, _: u32) -> T { self.type_error() } fn unop_u64(&self, _: &UnaryOp, _: u64) -> T { self.type_error() } fn from_f32(&self, _: f32) -> T { self.type_error() } fn from_f64(&self, _: f64) -> T { self.type_error() } fn from_i32(&self, _: u32) -> T { self.type_error() } fn from_i64(&self, _: u64) -> T { self.type_error() } } trait InterpretExpr<T> { fn interpret_expr(&self, expr: &Expr, locals: &mut[[u8;8]], heap: &mut Vec<u8>) -> T; } trait InterpretMain { fn interpret_main(&self); } pub struct Program { functions: HashMap<String, Function>, heap: Vec<u8>, } impl FunctionTable for Program { fn lookup_function(&self, name: &str) -> &Function { &self.functions.get(name).unwrap() } } impl InitialHeap for Program { fn init_heap(&self) -> Vec<u8> { self.heap.clone() } } impl<I> InterpretPrimitives<()> for I { fn get_raw(&self, _: &Size, _: &[u8]) {} fn set_raw(&self, _: &Size, _: &mut [u8], _: ()) {} fn binop_f32(&self, _: &BinOp, _: f32, _: f32) {} fn binop_f64(&self, _: &BinOp, _: f64, _: f64) {} fn binop_i32(&self, _: &BinOp, _: i32, _: i32) {} fn binop_i64(&self, _: &BinOp, _: i64, _: i64) {} fn binop_u32(&self, _: &BinOp, _: u32, _: u32) {} fn binop_u64(&self, _: &BinOp, _: u64, _: u64) {} fn unop_f32(&self, _: &UnaryOp, _: f32) {} fn unop_f64(&self, _: &UnaryOp, _: f64) {} fn unop_i32(&self, _: &UnaryOp, _: i32) {} fn unop_i64(&self, _: &UnaryOp, _: i64) {} fn unop_u32(&self, _: &UnaryOp, _: u32) {} fn unop_u64(&self, _: &UnaryOp, _: u64) {} } impl<I> InterpretPrimitives<f32> for I { fn binop_f32(&self, op: &BinOp, lhs: f32, rhs: f32) -> f32 { match op { &Add => (lhs + rhs), &Copysign => (lhs * rhs.signum()), &Div => (lhs / rhs), &Max => (lhs.max(rhs)), &Min => (lhs.min(rhs)), &Mul => (lhs * rhs), &Sub => (lhs - rhs), _ => self.type_error(), } } fn unop_f32(&self, op: &UnaryOp, arg: f32) -> f32 { match op { &Abs => arg.abs(), &Ceil => arg.ceil(), &Floor => arg.floor(), &Nearest => arg.round(), 
&Neg => -arg, &Sqrt => arg.sqrt(), &Trunc => arg.trunc(), _ => self.type_error(), } } fn from_f32(&self, value: f32) -> f32 { value } fn get_raw(&self, size: &Size, bytes: &[u8]) -> f32 { match size { &Bits64 => LittleEndian::read_f32(&bytes[4..]), &Bits32 => LittleEndian::read_f32(bytes), _ => self.type_error(), } } fn set_raw(&self, size: &Size, bytes: &mut [u8], value: f32) { match size { &Bits64 => LittleEndian::write_f32(&mut bytes[4..], value), &Bits32 => LittleEndian::write_f32(bytes, value), _ => self.type_error(), } } } impl<I> InterpretPrimitives<f64> for I { fn binop_f64(&self, op: &BinOp, lhs: f64, rhs: f64) -> f64 { match op { &Add => (lhs + rhs), &Copysign => (lhs * rhs.signum()), &Div => (lhs / rhs), &Max => (lhs.max(rhs)), &Min => (lhs.min(rhs)), &Mul => (lhs * rhs), &Sub => (lhs - rhs), _ => self.type_error(), } } fn unop_f64(&self, op: &UnaryOp, arg: f64) -> f64 { match op { &Abs => arg.abs(), &Ceil => arg.ceil(), &Floor => arg.floor(), &Nearest => arg.round(), &Neg => -arg, &Sqrt => arg.sqrt(), &Trunc => arg.trunc(), _ => self.type_error(), } } fn from_f64(&self, value: f64) -> f64 { value } fn get_raw(&self, size: &Size, bytes: &[u8]) -> f64 { match size { &Bits64 => LittleEndian::read_f64(bytes), _ => self.type_error(), } } fn set_raw(&self, size: &Size, bytes: &mut [u8], value: f64) { match size { &Bits64 => LittleEndian::write_f64(bytes, value), _ => self.type_error(), } } } impl<I> InterpretPrimitives<i32> for I { fn binop_i32(&self, op: &BinOp, lhs: i32, rhs: i32) -> i32 { match op { &Add => (lhs.wrapping_add(rhs)), &Div => (lhs / rhs), &Mul => (lhs.wrapping_mul(rhs)), &Rem => (lhs % rhs), _ => self.type_error(), } } fn from_i32(&self, value: u32) -> i32 { value as i32 } fn get_raw(&self, size: &Size, bytes: &[u8]) -> i32 { match size { &Bits64 => LittleEndian::read_i32(&bytes[4..]), &Bits32 => LittleEndian::read_i32(bytes), &Bits16 => LittleEndian::read_i16(bytes) as i32, &Bits8 => (bytes[0] as i8) as i32, } } fn set_raw(&self, size: 
&Size, bytes: &mut [u8], value: i32) { match size { &Bits64 => LittleEndian::write_i32(&mut bytes[4..], value), &Bits32 => LittleEndian::write_i32(bytes, value as i32), &Bits16 => LittleEndian::write_i16(bytes, value as i16), &Bits8 => bytes[0] = value as u8, } } } impl<I> InterpretPrimitives<i64> for I { fn binop_i64(&self, op: &BinOp, lhs: i64, rhs: i64) -> i64 { match op { &Add => (lhs.wrapping_add(rhs)), &Div => (lhs / rhs), &Mul => (lhs.wrapping_mul(rhs)), &Rem => (lhs % rhs), _ => self.type_error(), } } fn from_i64(&self, value: u64) -> i64 { value as i64 } fn get_raw(&self, size: &Size, bytes: &[u8]) -> i64 { match size { &Bits64 => LittleEndian::read_i64(bytes), &Bits32 => LittleEndian::read_i32(bytes) as i64, &Bits16 => LittleEndian::read_i16(bytes) as i64, &Bits8 => (bytes[0] as i8) as i64, } } fn set_raw(&self, size: &Size, bytes: &mut [u8], value: i64) { match size { &Bits64 => LittleEndian::write_i64(bytes, value), &Bits32 => LittleEndian::write_i32(bytes, value as i32), &Bits16 => LittleEndian::write_i16(bytes, value as i16), &Bits8 => bytes[0] = value as u8, } } } impl<I> InterpretPrimitives<u32> for I { fn binop_i32(&self, op: &BinOp, lhs: i32, rhs: i32) -> u32 { match op { &Ge => (lhs >= rhs) as u32, &Gt => (lhs > rhs) as u32, &Le => (lhs <= rhs) as u32, &Lt => (lhs < rhs) as u32, _ => self.type_error(), } } fn binop_u32(&self, op: &BinOp, lhs: u32, rhs: u32) -> u32 { match op { &Add => (lhs.wrapping_add(rhs)), &And => (lhs & rhs), &Div => (lhs / rhs), &Eq => (lhs == rhs) as u32, &Ge => (lhs >= rhs) as u32, &Gt => (lhs > rhs) as u32, &Le => (lhs <= rhs) as u32, &Lt => (lhs < rhs) as u32, &Mul => (lhs.wrapping_mul(rhs)), &Ne => (lhs != rhs) as u32, &Or => (lhs | rhs), &Rem => (lhs % rhs), &RotL => (lhs.rotate_left(rhs)), &RotR => (lhs.rotate_right(rhs)), &Shl => (lhs.wrapping_shl(rhs)), &Shr => (lhs.wrapping_shr(rhs)), &Sub => (lhs.wrapping_sub(rhs)), &Xor => (lhs ^ rhs), _ => self.type_error(), } } fn binop_f32(&self, op: &BinOp, lhs: f32, rhs: 
f32) -> u32 { match op { &Eq => (lhs == rhs) as u32, &Ge => (lhs >= rhs) as u32, &Gt => (lhs > rhs) as u32, &Le => (lhs <= rhs) as u32, &Lt => (lhs < rhs) as u32, &Ne => (lhs != rhs) as u32, _ => self.type_error(), } } fn unop_u32(&self, op: &UnaryOp, arg: u32) -> u32 { match op { &Clz => arg.leading_zeros(), &Ctz => arg.trailing_zeros(), &Popcnt => arg.count_ones(), &Eqz => (arg == 0) as u32, _ => self.type_error(), } } fn from_i32(&self, value: u32) -> u32 { value } fn get_raw(&self, size: &Size, bytes: &[u8]) -> u32 { match size { &Bits64 => LittleEndian::read_u32(&bytes[4..]), &Bits32 => LittleEndian::read_u32(bytes), &Bits16 => LittleEndian::read_u16(bytes) as u32, &Bits8 => bytes[0] as u32, } } fn set_raw(&self, size: &Size, bytes: &mut [u8], value: u32) { match size { &Bits64 => LittleEndian::write_u32(&mut bytes[4..], value), &Bits32 => LittleEndian::write_u32(bytes, value), &Bits16 => LittleEndian::write_u16(bytes, value as u16), &Bits8 => bytes[0] = value as u8, } }<|fim▁hole|>impl<I> InterpretPrimitives<u64> for I { fn binop_u64(&self, op: &BinOp, lhs: u64, rhs: u64) -> u64 { match op { &Add => (lhs.wrapping_add(rhs)), &And => (lhs & rhs), &Div => (lhs / rhs), &Mul => (lhs.wrapping_mul(rhs)), &Or => (lhs | rhs), &Rem => (lhs % rhs), &RotL => (lhs.rotate_left(rhs as u32)), &RotR => (lhs.rotate_right(rhs as u32)), &Shl => (lhs.wrapping_shl(rhs as u32)), &Shr => (lhs.wrapping_shr(rhs as u32)), &Sub => (lhs.wrapping_sub(rhs)), &Xor => (lhs ^ rhs), _ => self.type_error(), } } fn unop_u64(&self, op: &UnaryOp, arg: u64) -> u64 { match op { &Clz => arg.leading_zeros() as u64, &Ctz => arg.trailing_zeros() as u64, &Popcnt => arg.count_ones() as u64, _ => self.type_error(), } } fn from_i64(&self, value: u64) -> u64 { value } fn get_raw(&self, size: &Size, bytes: &[u8]) -> u64 { match size { &Bits64 => LittleEndian::read_u64(bytes), &Bits32 => LittleEndian::read_u32(bytes) as u64, &Bits16 => LittleEndian::read_u16(bytes) as u64, &Bits8 => bytes[0] as u64, } } fn 
set_raw(&self, size: &Size, bytes: &mut [u8], value: u64) { match size { &Bits64 => LittleEndian::write_u64(bytes, value), &Bits32 => LittleEndian::write_u32(bytes, value as u32), &Bits16 => LittleEndian::write_u16(bytes, value as u16), &Bits8 => bytes[0] = value as u8, } } } impl<I, T> InterpretExpr<T> for I where I: InterpretPrimitives<()> + InterpretPrimitives<T> + InterpretPrimitives<f32> + InterpretPrimitives<f64> + InterpretPrimitives<i32> + InterpretPrimitives<i64> + InterpretPrimitives<u32> + InterpretPrimitives<u64> + FunctionTable, T: Copy + Default, { fn interpret_expr(&self, expr: &Expr, locals: &mut[[u8;8]], heap: &mut Vec<u8>) -> T { // NOTE: currently only handling the control flow that can be dealt with in direct style. // More sophisticated control flow will require a technique for handling a CFG, // e.g. functional SSA. match expr { &BinOpExpr(F32s, ref op, ref lhs, ref rhs) => { let lhs: f32 = self.interpret_expr(lhs, locals, heap); let rhs: f32 = self.interpret_expr(rhs, locals, heap); self.binop_f32(op, lhs, rhs) }, &BinOpExpr(F64s, ref op, ref lhs, ref rhs) => { let lhs: f64 = self.interpret_expr(lhs, locals, heap); let rhs: f64 = self.interpret_expr(rhs, locals, heap); self.binop_f64(op, lhs, rhs) }, &BinOpExpr(I32s, ref op, ref lhs, ref rhs) => { let lhs: i32 = self.interpret_expr(lhs, locals, heap); let rhs: i32 = self.interpret_expr(rhs, locals, heap); self.binop_i32(op, lhs, rhs) }, &BinOpExpr(I64s, ref op, ref lhs, ref rhs) => { let lhs: i64 = self.interpret_expr(lhs, locals, heap); let rhs: i64 = self.interpret_expr(rhs, locals, heap); self.binop_i64(op, lhs, rhs) }, &BinOpExpr(U32s, ref op, ref lhs, ref rhs) => { let lhs: u32 = self.interpret_expr(lhs, locals, heap); let rhs: u32 = self.interpret_expr(rhs, locals, heap); self.binop_u32(op, lhs, rhs) }, &BinOpExpr(U64s, ref op, ref lhs, ref rhs) => { let lhs: u64 = self.interpret_expr(lhs, locals, heap); let rhs: u64 = self.interpret_expr(rhs, locals, heap); self.binop_u64(op, lhs, rhs) 
}, &CallExpr(ref name, ref args) => { let defn = self.lookup_function(name); let mut new_locals: Vec<[u8;8]> = defn.params.iter().zip(args).map(|(param, arg)| { match param.typ { F32 => { let value: f32 = self.interpret_expr(arg, locals, heap); self.as_raw(value) }, F64 => { let value: f64 = self.interpret_expr(arg, locals, heap); self.as_raw(value) }, I32 => { let value: u32 = self.interpret_expr(arg, locals, heap); self.as_raw(value) }, I64 => { let value: u64 = self.interpret_expr(arg, locals, heap); self.as_raw(value) }, } }).chain(repeat([0;8]).take(defn.locals.len())).collect(); let last = defn.body.len() - 1; for expr in &defn.body[..last] { let () = self.interpret_expr(expr, &mut *new_locals, heap); } for expr in &defn.body[last..] { return self.interpret_expr(expr, &mut *new_locals, heap); } T::default() }, &ConstExpr(F32Const(value)) => self.from_f32(value), &ConstExpr(F64Const(value)) => self.from_f64(value), &ConstExpr(I32Const(value)) => self.from_i32(value), &ConstExpr(I64Const(value)) => self.from_i64(value), &GetLocalExpr(ref var) => self.get_raw(&Bits64, &locals[var.position]), &GrowMemoryExpr(ref ext) => { let result: u32 = heap.len() as u32; let ext: u32 = self.interpret_expr(ext, locals, heap); heap.extend(repeat(0).take(ext as usize)); self.from_i32(result) }, &IfThenExpr(ref cond, ref true_branch) => { let cond: u32 = self.interpret_expr(cond, locals, heap); if cond == 0 { T::default() } else { self.interpret_expr(true_branch, locals, heap) } }, &IfThenElseExpr(ref cond, ref true_branch, ref false_branch) => { let cond: u32 = self.interpret_expr(cond, locals, heap); if cond == 0 { self.interpret_expr(false_branch, locals, heap) } else { self.interpret_expr(true_branch, locals, heap) } }, &LoadExpr(_, ref size, ref addr) => { let addr: u32 = self.interpret_expr(addr, locals, heap); self.get_raw(size, &heap[addr as usize..]) }, &NopExpr => T::default(), &SetLocalExpr(ref var, ref value) => { let value: T = self.interpret_expr(value, locals, 
heap); self.set_raw(&Bits64, &mut locals[var.position], value); value }, &StoreExpr(_, ref size, ref addr, ref value) => { let addr: u32 = self.interpret_expr(addr, locals, heap); let value: T = self.interpret_expr(value, locals, heap); self.set_raw(size, &mut heap[addr as usize..], value); value }, &UnaryOpExpr(F32, ref op, ref arg) => { let arg: f32 = self.interpret_expr(arg, locals, heap); self.unop_f32(op, arg) }, &UnaryOpExpr(F64, ref op, ref arg) => { let arg: f64 = self.interpret_expr(arg, locals, heap); self.unop_f64(op, arg) }, &UnaryOpExpr(I32, ref op, ref arg) => { let arg: u32 = self.interpret_expr(arg, locals, heap); self.unop_u32(op, arg) }, &UnaryOpExpr(I64, ref op, ref arg) => { let arg: u64 = self.interpret_expr(arg, locals, heap); self.unop_u64(op, arg) }, } } } impl<I> InterpretMain for I where I: InterpretExpr<()> + FunctionTable + InitialHeap { fn interpret_main(&self) { let main = self.lookup_function("main"); let mut locals: Vec<[u8;8]> = repeat([0;8]).take(main.locals.len()).collect(); let mut heap = self.init_heap(); for expr in &main.body { let () = self.interpret_expr(&expr, &mut locals, &mut heap); } } }<|fim▁end|>
}
<|file_name|>0003_rename_deleted_field.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2019-06-27 20:29 from __future__ import unicode_literals from django.db import migrations import osf.utils.fields class Migration(migrations.Migration): <|fim▁hole|> ] operations = [ migrations.RenameField( model_name='nodesettings', new_name='is_deleted', old_name='deleted', ), migrations.RenameField( model_name='usersettings', new_name='is_deleted', old_name='deleted', ), migrations.AddField( model_name='nodesettings', name='deleted', field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), ), migrations.AddField( model_name='usersettings', name='deleted', field=osf.utils.fields.NonNaiveDateTimeField(blank=True, null=True), ), ]<|fim▁end|>
dependencies = [ ('addons_onedrive', '0002_auto_20171121_1426'),
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use db::Pool; use rocket::{self, Rocket, Request}; use rocket_contrib::{Json, Value}; use std::env; mod teams; mod members; pub fn app(pool: Pool) -> Rocket { set_log_level(); rocket::ignite() .manage(pool) .mount("/teams", teams::routes()) .mount("/members", members::routes()) .catch(errors![not_found, server_error]) } #[error(404)] fn not_found(_req: &Request) -> Json<Value> { Json(json!({ "message": "Not Found" })) } #[error(500)] fn server_error(_req: &Request) -> Json<Value> { Json(json!({ "message": "Internal Server Error" }))<|fim▁hole|> fn set_log_level() { env::set_var("ROCKET_LOG", "critical"); }<|fim▁end|>
}
<|file_name|>SerializerTest.ts<|end_file_name|><|fim▁begin|>import { Pipeline, Step } from '@ephox/agar'; import { Arr } from '@ephox/katamari'; import { LegacyUnit } from '@ephox/mcagar'; import Serializer from 'tinymce/core/api/dom/Serializer'; import DOMUtils from 'tinymce/core/api/dom/DOMUtils'; import TrimHtml from 'tinymce/core/dom/TrimHtml'; import ViewBlock from '../../module/test/ViewBlock'; import Zwsp from 'tinymce/core/text/Zwsp'; import { UnitTest } from '@ephox/bedrock'; declare const escape: any; UnitTest.asynctest('browser.tinymce.core.dom.SerializerTest', function () { const success = arguments[arguments.length - 2]; const failure = arguments[arguments.length - 1]; const suite = LegacyUnit.createSuite(); const DOM = DOMUtils.DOM; const viewBlock = ViewBlock(); const teardown = function () { viewBlock.update(''); }; const addTeardown = function (steps) { return Arr.bind(steps, function (step) { return [step, Step.sync(teardown)]; }); }; suite.test('Schema rules', function () { let ser = Serializer({ fix_list_elements : true }); ser.setRules('@[id|title|class|style],div,img[src|alt|-style|border],span,hr'); DOM.setHTML('test', '<img title="test" src="tinymce/ui/img/raster.gif" data-mce-src="tinymce/ui/img/raster.gif" alt="test" ' + 'border="0" style="border: 1px solid red" class="test" /><span id="test2">test</span><hr />'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<img title="test" class="test" src="tinymce/ui/img/raster.gif" ' + 'alt="test" border="0" /><span id="test2">test</span><hr />', 'Global rule' ); ser.setRules('*a[*],em/i[*],strong/b[*i*]'); DOM.setHTML('test', '<a href="test" data-mce-href="test">test</a><strong title="test" class="test">test2</strong><em title="test">test3</em>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<a href="test">test</a><strong title="test">test2</strong><em title="test">' + 'test3</em>', 'Wildcard rules'); 
ser.setRules('br,hr,input[type|name|value],div[id],span[id],strong/b,a,em/i,a[!href|!name],img[src|border=0|title={$uid}]'); DOM.setHTML('test', '<br /><hr /><input type="text" name="test" value="val" class="no" />' + '<span id="test2" class="no"><b class="no">abc</b><em class="no">123</em></span>123<a href="file.html" ' + 'data-mce-href="file.html">link</a><a name="anchor"></a><a>no</a><img src="tinymce/ui/img/raster.gif" ' + 'data-mce-src="tinymce/ui/img/raster.gif" />'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<div id="test"><br /><hr /><input type="text" name="test" value="val" />' + '<span id="test2"><strong>abc</strong><em>123</em></span>123<a href="file.html">link</a>' + '<a name="anchor"></a>no<img src="tinymce/ui/img/raster.gif" border="0" title="mce_0" /></div>', 'Output name and attribute rules'); ser.setRules('img[src|border=0|alt=]'); DOM.setHTML('test', '<img src="tinymce/ui/img/raster.gif" data-mce-src="tinymce/ui/img/raster.gif" border="0" alt="" />'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<img src="tinymce/ui/img/raster.gif" border="0" alt="" />', 'Default attribute with empty value'); ser.setRules('img[src|border=0|alt=],div[style|id],*[*]'); DOM.setHTML('test', '<img src="tinymce/ui/img/raster.gif" data-mce-src="tinymce/ui/img/raster.gif" /><hr />'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<img src="tinymce/ui/img/raster.gif" border="0" alt="" /><hr />' ); ser = Serializer({ valid_elements : 'img[src|border=0|alt=]', extended_valid_elements : 'div[id],img[src|alt=]' }); DOM.setHTML('test', '<img src="tinymce/ui/img/raster.gif" data-mce-src="tinymce/ui/img/raster.gif" alt="" />'); LegacyUnit.equal( ser.serialize(DOM.get('test')), '<div id="test"><img src="tinymce/ui/img/raster.gif" alt="" /></div>' ); ser = Serializer({ invalid_elements : 'hr,br' }); DOM.setHTML('test', '<img src="tinymce/ui/img/raster.gif" data-mce-src="tinymce/ui/img/raster.gif" /><hr /><br />'); LegacyUnit.equal( 
ser.serialize(DOM.get('test'), { getInner: true }), '<img src="tinymce/ui/img/raster.gif" />' ); }); suite.test('allow_unsafe_link_target (default)', function () { const ser = Serializer({ }); DOM.setHTML('test', '<a href="a" target="_blank">a</a><a href="b" target="_blank">b</a>'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<a href="a" target="_blank" rel="noopener">a</a><a href="b" target="_blank" rel="noopener">b</a>' ); DOM.setHTML('test', '<a href="a" rel="lightbox" target="_blank">a</a><a href="b" rel="lightbox" target="_blank">b</a>'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<a href="a" target="_blank" rel="lightbox noopener">a</a><a href="b" target="_blank" rel="lightbox noopener">b</a>' ); DOM.setHTML('test', '<a href="a" rel="lightbox x" target="_blank">a</a><a href="b" rel="lightbox x" target="_blank">b</a>'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<a href="a" target="_blank" rel="lightbox noopener x">a</a><a href="b" target="_blank" rel="lightbox noopener x">b</a>' ); DOM.setHTML('test', '<a href="a" rel="noopener a" target="_blank">a</a>'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<a href="a" target="_blank" rel="noopener a">a</a>' ); DOM.setHTML('test', '<a href="a" rel="a noopener b" target="_blank">a</a>'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<a href="a" target="_blank" rel="a noopener b">a</a>' ); }); suite.test('allow_unsafe_link_target (disabled)', function () { const ser = Serializer({ allow_unsafe_link_target: true }); DOM.setHTML('test', '<a href="a" target="_blank">a</a><a href="b" target="_blank">b</a>'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<a href="a" target="_blank">a</a><a href="b" target="_blank">b</a>' ); }); suite.test('format tree', function () { const ser = Serializer({ }); DOM.setHTML('test', 'a'); LegacyUnit.equal( 
ser.serialize(DOM.get('test'), { format: 'tree' }).name,<|fim▁hole|> suite.test('Entity encoding', function () { let ser; ser = Serializer({ entity_encoding : 'numeric' }); DOM.setHTML('test', '&lt;&gt;&amp;&quot;&nbsp;&aring;&auml;&ouml;'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner : true }), '&lt;&gt;&amp;"&#160;&#229;&#228;&#246;'); ser = Serializer({ entity_encoding : 'named' }); DOM.setHTML('test', '&lt;&gt;&amp;&quot;&nbsp;&aring;&auml;&ouml;'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner : true }), '&lt;&gt;&amp;"&nbsp;&aring;&auml;&ouml;'); ser = Serializer({ entity_encoding : 'named+numeric', entities : '160,nbsp,34,quot,38,amp,60,lt,62,gt' }); DOM.setHTML('test', '&lt;&gt;&amp;&quot;&nbsp;&aring;&auml;&ouml;'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner : true }), '&lt;&gt;&amp;"&nbsp;&#229;&#228;&#246;'); ser = Serializer({ entity_encoding : 'raw' }); DOM.setHTML('test', '&lt;&gt;&amp;&quot;&nbsp;&aring;&auml;&ouml;'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner : true }), '&lt;&gt;&amp;"\u00a0\u00e5\u00e4\u00f6'); }); suite.test('Form elements (general)', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules( 'form[method],label[for],input[type|name|value|checked|disabled|readonly|length|maxlength],select[multiple],' + 'option[value|selected],textarea[name|disabled|readonly]' ); DOM.setHTML('test', '<input type="text" />'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<input type="text" />'); DOM.setHTML('test', '<input type="text" value="text" length="128" maxlength="129" />'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<input type="text" value="text" length="128" maxlength="129" />'); DOM.setHTML('test', '<form method="post"><input type="hidden" name="method" value="get" /></form>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<form method="post"><input type="hidden" name="method" value="get" /></form>'); DOM.setHTML('test', '<label 
for="test">label</label>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<label for="test">label</label>'); DOM.setHTML('test', '<input type="checkbox" value="test" /><input type="button" /><textarea></textarea>'); // Edge will add an empty input value so remove that to normalize test since it doesn't break anything LegacyUnit.equal( ser.serialize(DOM.get('test')).replace(/ value=""/g, ''), '<input type="checkbox" value="test" /><input type="button" /><textarea></textarea>' ); }); suite.test('Form elements (checkbox)', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('form[method],label[for],input[type|name|value|checked|disabled|readonly|length|maxlength],select[multiple],option[value|selected]'); DOM.setHTML('test', '<input type="checkbox" value="1">'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<input type="checkbox" value="1" />'); DOM.setHTML('test', '<input type="checkbox" value="1" checked disabled readonly>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<input type="checkbox" value="1" checked="checked" disabled="disabled" readonly="readonly" />'); DOM.setHTML('test', '<input type="checkbox" value="1" checked="1" disabled="1" readonly="1">'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<input type="checkbox" value="1" checked="checked" disabled="disabled" readonly="readonly" />'); DOM.setHTML('test', '<input type="checkbox" value="1" checked="true" disabled="true" readonly="true">'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<input type="checkbox" value="1" checked="checked" disabled="disabled" readonly="readonly" />'); }); suite.test('Form elements (select)', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('form[method],label[for],input[type|name|value|checked|disabled|readonly|length|maxlength],select[multiple],option[value|selected]'); DOM.setHTML('test', '<select><option value="1">test1</option><option value="2" selected>test2</option></select>'); 
LegacyUnit.equal(ser.serialize(DOM.get('test')), '<select><option value="1">test1</option><option value="2" selected="selected">test2</option></select>'); DOM.setHTML('test', '<select><option value="1">test1</option><option selected="1" value="2">test2</option></select>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<select><option value="1">test1</option><option value="2" selected="selected">test2</option></select>'); DOM.setHTML('test', '<select><option value="1">test1</option><option value="2" selected="true">test2</option></select>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<select><option value="1">test1</option><option value="2" selected="selected">test2</option></select>'); DOM.setHTML('test', '<select multiple></select>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<select multiple="multiple"></select>'); DOM.setHTML('test', '<select multiple="multiple"></select>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<select multiple="multiple"></select>'); DOM.setHTML('test', '<select multiple="1"></select>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<select multiple="multiple"></select>'); DOM.setHTML('test', '<select></select>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<select></select>'); }); suite.test('List elements', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('ul[compact],ol,li'); DOM.setHTML('test', '<ul compact></ul>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<ul compact="compact"></ul>'); DOM.setHTML('test', '<ul compact="compact"></ul>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<ul compact="compact"></ul>'); DOM.setHTML('test', '<ul compact="1"></ul>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<ul compact="compact"></ul>'); DOM.setHTML('test', '<ul></ul>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<ul></ul>'); DOM.setHTML('test', '<ol><li>a</li><ol><li>b</li><li>c</li></ol><li>e</li></ol>'); 
LegacyUnit.equal(ser.serialize(DOM.get('test')), '<ol><li>a<ol><li>b</li><li>c</li></ol></li><li>e</li></ol>'); }); suite.test('Tables', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('table,tr,td[nowrap]'); DOM.setHTML('test', '<table><tr><td></td></tr></table>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<table><tr><td></td></tr></table>'); DOM.setHTML('test', '<table><tr><td nowrap></td></tr></table>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<table><tr><td nowrap="nowrap"></td></tr></table>'); DOM.setHTML('test', '<table><tr><td nowrap="nowrap"></td></tr></table>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<table><tr><td nowrap="nowrap"></td></tr></table>'); DOM.setHTML('test', '<table><tr><td nowrap="1"></td></tr></table>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<table><tr><td nowrap="nowrap"></td></tr></table>'); }); suite.test('Styles', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('*[*]'); DOM.setHTML('test', '<span style="border: 1px solid red" data-mce-style="border: 1px solid red;">test</span>'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner: true }), '<span style="border: 1px solid red;">test</span>'); }); suite.test('Comments', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('*[*]'); DOM.setHTML('test', '<!-- abc -->'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner: true }), '<!-- abc -->'); }); suite.test('Non HTML elements and attributes', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('*[*]'); ser.schema.addValidChildren('+div[prefix:test]'); DOM.setHTML('test', '<div test:attr="test">test</div>'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner : true }), '<div test:attr="test">test</div>'); DOM.setHTML('test', 'test1<prefix:test>Test</prefix:test>test2'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner: true }), 
'test1<prefix:test>Test</prefix:test>test2'); }); suite.test('Padd empty elements', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('#p'); DOM.setHTML('test', '<p>test</p><p></p>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<p>test</p><p>&nbsp;</p>'); }); suite.test('Padd empty elements with BR', function () { const ser = Serializer({ padd_empty_with_br: true }); ser.setRules('#p,table,tr,#td,br'); DOM.setHTML('test', '<p>a</p><p></p>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<p>a</p><p><br /></p>'); DOM.setHTML('test', '<p>a</p><table><tr><td><br></td></tr></table>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<p>a</p><table><tr><td><br /></td></tr></table>'); }); suite.test('Do not padd empty elements with padded children', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('#p,#span,b'); DOM.setHTML('test', '<p><span></span></p><p><b><span></span></b></p>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<p><span>&nbsp;</span></p><p><b><span>&nbsp;</span></b></p>'); }); suite.test('Remove empty elements', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('-p'); DOM.setHTML('test', '<p>test</p><p></p>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<p>test</p>'); }); suite.test('Script with non JS type attribute', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<s' + 'cript type="mylanguage"></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript type="mylanguage"></s' + 'cript>'); }); suite.test('Script with tags inside a comment with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<s' + 'cript>// <img src="test"><a href="#"></a></s' + 'cript>'); LegacyUnit.equal( 
ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <![CDATA[\n// <img src="test"><a href="#"></a>\n// ]]></s' + 'cript>' ); }); suite.test('Script with tags inside a comment', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<s' + 'cript>// <img src="test"><a href="#"></a></s' + 'cript>'); LegacyUnit.equal( ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <img src="test"><a href="#"></a></s' + 'cript>' ); }); suite.test('Script with less than with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<s' + 'cript>1 < 2;</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <![CDATA[\n1 < 2;\n// ]]></s' + 'cript>'); }); suite.test('Script with less than', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<s' + 'cript>1 < 2;</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>1 < 2;</s' + 'cript>'); }); suite.test('Script with type attrib and less than with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<s' + 'cript type="text/javascript">1 < 2;</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<script type="text/javascript">// <![CDATA[\n1 < 2;\n// ]]></s' + 'cript>'); }); suite.test('Script with type attrib and less than', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<s' + 'cript type="text/javascript">1 < 2;</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), 
'<script type=\"text/javascript\">1 < 2;</script>'); }); suite.test('Script with whitespace in beginning/end with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>\n\t1 < 2;\n\t if (2 < 1)\n\t\talert(1);\n</s' + 'cript>'); LegacyUnit.equal( ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <![CDATA[\n\t1 < 2;\n\t if (2 < 1)\n\t\talert(1);\n// ]]></s' + 'cript>' ); }); suite.test('Script with whitespace in beginning/end', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>\n\t1 < 2;\n\t if (2 < 1)\n\t\talert(1);\n</s' + 'cript>'); LegacyUnit.equal( ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<script>\n\t1 < 2;\n\t if (2 < 1)\n\t\talert(1);\n</script>' ); }); suite.test('Script with a HTML comment and less than with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script><!-- 1 < 2; // --></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <![CDATA[\n1 < 2;\n// ]]></s' + 'cript>'); }); suite.test('Script with a HTML comment and less than', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script><!-- 1 < 2; // --></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<script><!-- 1 < 2; // --></script>'); }); suite.test('Script with white space in beginning, comment and less than with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>\n\n<!-- 1 < 2;\n\n--></s' + 'cript>'); 
LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <![CDATA[\n1 < 2;\n// ]]></s' + 'cript>'); }); suite.test('Script with white space in beginning, comment and less than', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>\n\n<!-- 1 < 2;\n\n--></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<script>\n\n<!-- 1 < 2;\n\n--></script>'); }); suite.test('Script with comments and cdata with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>// <![CDATA[1 < 2; // ]]></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <![CDATA[\n1 < 2;\n// ]]></s' + 'cript>'); }); suite.test('Script with comments and cdata', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>// <![CDATA[1 < 2; // ]]></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<script>// <![CDATA[1 < 2; // ]]></script>'); }); suite.test('Script with cdata with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script><![CDATA[1 < 2; ]]></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <![CDATA[\n1 < 2;\n// ]]></s' + 'cript>'); }); suite.test('Script with cdata', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script><![CDATA[1 < 2; ]]></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<script><![CDATA[1 < 2; ]]></script>'); }); suite.test('Script 
whitespace in beginning/end and cdata with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>\n\n<![CDATA[\n\n1 < 2;\n\n]]>\n\n</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<s' + 'cript>// <![CDATA[\n1 < 2;\n// ]]></s' + 'cript>'); }); suite.test('Script whitespace in beginning/end and cdata', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>\n\n<![CDATA[\n\n1 < 2;\n\n]]>\n\n</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<script>\n\n<![CDATA[\n\n1 < 2;\n\n]]>\n\n</script>'); }); suite.test('Whitespace preserve in pre', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('pre'); DOM.setHTML('test', '<pre> </pre>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<pre> </pre>'); }); suite.test('Script with src attr', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script src="test.js" data-mce-src="test.js"></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<s' + 'cript src="test.js"></s' + 'cript>'); }); suite.test('Script with HTML comment, comment and CDATA with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script><!--// <![CDATA[var hi = "hello";// ]]>--></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script>// <![CDATA[\nvar hi = \"hello\";\n// ]]></s' + 'cript>'); }); suite.test('Script with HTML comment, comment and CDATA', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script><!--// 
<![CDATA[var hi = "hello";// ]]>--></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script><!--// <![CDATA[var hi = \"hello\";// ]]>--></script>'); }); suite.test('Script with block comment around cdata with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>/* <![CDATA[ */\nvar hi = "hello";\n/* ]]> */</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script>// <![CDATA[\nvar hi = \"hello\";\n// ]]></s' + 'cript>'); }); suite.test('Script with block comment around cdata', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>/* <![CDATA[ */\nvar hi = "hello";\n/* ]]> */</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script>/* <![CDATA[ */\nvar hi = \"hello\";\n/* ]]> */</script>'); }); suite.test('Script with html comment and block comment around cdata with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script><!-- /* <![CDATA[ */\nvar hi = "hello";\n/* ]]>*/--></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script>// <![CDATA[\nvar hi = \"hello\";\n// ]]></s' + 'cript>'); }); suite.test('Script with html comment and block comment around cdata', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script><!-- /* <![CDATA[ */\nvar hi = "hello";\n/* ]]>*/--></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script><!-- /* <![CDATA[ */\nvar hi = \"hello\";\n/* ]]>*/--></script>'); }); suite.test('Script with line comment and html comment with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, 
element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>// <!--\nvar hi = "hello";\n// --></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script>// <![CDATA[\nvar hi = \"hello\";\n// ]]></s' + 'cript>'); }); suite.test('Script with line comment and html comment', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>// <!--\nvar hi = "hello";\n// --></s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script>// <!--\nvar hi = \"hello\";\n// --></script>'); }); suite.test('Script with block comment around html comment with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, element_format: 'xhtml' }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>/* <!-- */\nvar hi = "hello";\n/*-->*/</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script>// <![CDATA[\nvar hi = \"hello\";\n// ]]></s' + 'cript>'); }); suite.test('Script with block comment around html comment', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('script[type|language|src]'); DOM.setHTML('test', '<script>/* <!-- */\nvar hi = "hello";\n/*-->*/</s' + 'cript>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<script>/* <!-- */\nvar hi = \"hello\";\n/*-->*/</script>'); }); suite.test('Protected blocks', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('noscript[test]'); DOM.setHTML('test', '<!--mce:protected ' + escape('<noscript test="test"><br></noscript>') + '-->'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<noscript test="test"><br></noscript>'); DOM.setHTML('test', '<!--mce:protected ' + escape('<noscript><br></noscript>') + '-->'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<noscript><br></noscript>'); DOM.setHTML('test', '<!--mce:protected ' + escape('<noscript><!-- text 
--><br></noscript>') + '-->'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<noscript><!-- text --><br></noscript>'); }); suite.test('Style with whitespace at beginning with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, valid_children: '+body[style]', element_format: 'xhtml' }); ser.setRules('style'); DOM.setHTML('test', '<style> body { background:#fff }</style>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<style><!--\n body { background:#fff }\n--></style>'); }); suite.test('Style with whitespace at beginning', function () { const ser = Serializer({ fix_list_elements : true, valid_children: '+body[style]' }); ser.setRules('style'); DOM.setHTML('test', '<style> body { background:#fff }</style>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<style> body { background:#fff }</style>'); }); suite.test('Style with cdata with element_format: xhtml', function () { const ser = Serializer({ fix_list_elements : true, valid_children: '+body[style]', element_format: 'xhtml' }); ser.setRules('style'); DOM.setHTML('test', '<style>\r\n<![CDATA[\r\n body { background:#fff }]]></style>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<style><!--\nbody { background:#fff }\n--></style>'); }); suite.test('Style with cdata', function () { const ser = Serializer({ fix_list_elements : true, valid_children: '+body[style]' }); ser.setRules('style'); DOM.setHTML('test', '<style>\r\n<![CDATA[\r\n body { background:#fff }]]></style>'); LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '<style>\n<![CDATA[\n body { background:#fff }]]></style>'); }); suite.test('CDATA', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('span'); DOM.setHTML('test', '123<!--[CDATA[<test>]]-->abc'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '123<![CDATA[<test>]]>abc'); DOM.setHTML('test', '123<!--[CDATA[<te\n\nst>]]-->abc'); 
LegacyUnit.equal(ser.serialize(DOM.get('test')).replace(/\r/g, ''), '123<![CDATA[<te\n\nst>]]>abc'); }); suite.test('BR at end of blocks', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('ul,li,br'); DOM.setHTML('test', '<ul><li>test<br /></li><li>test<br /></li><li>test<br /></li></ul>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<ul><li>test</li><li>test</li><li>test</li></ul>'); }); suite.test('Map elements', function () { const ser = Serializer({ fix_list_elements : true }); ser.setRules('map[id|name],area[shape|coords|href|target|alt]'); DOM.setHTML( 'test', '<map id="planetmap" name="planetmap"><area shape="rect" coords="0,0,82,126" href="sun.htm" data-mce-href="sun.htm" target="_blank" alt="sun" /></map>' ); LegacyUnit.equal( ser.serialize(DOM.get('test')).toLowerCase(), '<map id="planetmap" name="planetmap"><area shape="rect" coords="0,0,82,126" href="sun.htm" target="_blank" alt="sun" /></map>' ); }); suite.test('Custom elements', function () { const ser = Serializer({ custom_elements: 'custom1,~custom2', valid_elements: 'custom1,custom2' }); document.createElement('custom1'); document.createElement('custom2'); DOM.setHTML('test', '<p><custom1>c1</custom1><custom2>c2</custom2></p>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<custom1>c1</custom1><custom2>c2</custom2>'); }); suite.test('Remove internal classes', function () { const ser = Serializer({ valid_elements: 'span[class]' }); DOM.setHTML('test', '<span class="a mce-item-X mce-item-selected b"></span>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<span class="a b"></span>'); DOM.setHTML('test', '<span class="a mce-item-X"></span>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<span class="a"></span>'); DOM.setHTML('test', '<span class="mce-item-X"></span>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<span></span>'); DOM.setHTML('test', '<span class="mce-item-X b"></span>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<span 
class=" b"></span>'); DOM.setHTML('test', '<span class="b mce-item-X"></span>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<span class="b"></span>'); }); suite.test('Restore tabindex', function () { const ser = Serializer({ valid_elements: 'span[tabindex]' }); DOM.setHTML('test', '<span data-mce-tabindex="42"></span>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<span tabindex="42"></span>'); }); suite.test('Trailing BR (IE11)', function () { const ser = Serializer({ valid_elements: 'p,br' }); DOM.setHTML('test', '<p>a</p><br><br>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), '<p>a</p>'); DOM.setHTML('test', 'a<br><br>'); LegacyUnit.equal(ser.serialize(DOM.get('test')), 'a'); }); suite.test('addTempAttr', function () { const ser = Serializer({}); ser.addTempAttr('data-x'); ser.addTempAttr('data-y'); DOM.setHTML('test', '<p data-x="1" data-y="2" data-z="3">a</p>'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner: 1 }), '<p data-z="3">a</p>'); LegacyUnit.equal(TrimHtml.trimExternal(ser, '<p data-x="1" data-y="2" data-z="3">a</p>'), '<p data-z="3">a</p>'); }); suite.test('addTempAttr same attr twice', function () { const ser1 = Serializer({}); const ser2 = Serializer({}); ser1.addTempAttr('data-x'); ser2.addTempAttr('data-x'); DOM.setHTML('test', '<p data-x="1" data-z="3">a</p>'); LegacyUnit.equal(ser1.serialize(DOM.get('test'), { getInner: 1 }), '<p data-z="3">a</p>'); LegacyUnit.equal(TrimHtml.trimExternal(ser1, '<p data-x="1" data-z="3">a</p>'), '<p data-z="3">a</p>'); LegacyUnit.equal(ser2.serialize(DOM.get('test'), { getInner: 1 }), '<p data-z="3">a</p>'); LegacyUnit.equal(TrimHtml.trimExternal(ser2, '<p data-x="1" data-z="3">a</p>'), '<p data-z="3">a</p>'); }); suite.test('trim data-mce-bougs="all"', function () { const ser = Serializer({}); DOM.setHTML('test', 'a<p data-mce-bogus="all">b</p>c'); LegacyUnit.equal(ser.serialize(DOM.get('test'), { getInner: 1 }), 'ac'); LegacyUnit.equal(TrimHtml.trimExternal(ser, 'a<p 
data-mce-bogus="all">b</p>c'), 'ac'); }); suite.test('zwsp should not be treated as contents', function () { const ser = Serializer({ }); DOM.setHTML('test', '<p>' + Zwsp.ZWSP + '</p>'); LegacyUnit.equal( ser.serialize(DOM.get('test'), { getInner: true }), '<p>&nbsp;</p>' ); }); viewBlock.attach(); viewBlock.get().id = 'test'; Pipeline.async({}, addTeardown(suite.toSteps({})), function () { viewBlock.detach(); success(); }, failure); });<|fim▁end|>
'body' ); });
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var express = require('express'); var router = express.Router(); /** * 获得主页内容<|fim▁hole|> * */ router.get('/', function(req, res, next) { res.render('user/index', { title: 'Express' }); }); module.exports = router;<|fim▁end|>
<|file_name|>ImageConverter.hpp<|end_file_name|><|fim▁begin|>#ifndef IMAGECONVERTER_HPP #define IMAGECONVERTER_HPP #include <QImage> #include <QDebug> #include <opencv2/core.hpp><|fim▁hole|>#include <opencv2/imgproc.hpp> #include "Logger.hpp" class ImageConverter { public: ImageConverter() = default; static QImage Mat2QImage(const cv::Mat &cvImage); static cv::Mat QImage2Mat(const QImage& image); }; #endif // IMAGECONVERTER_HPP<|fim▁end|>
<|file_name|>codemirror.component.js<|end_file_name|><|fim▁begin|>/* Misojs Codemirror component */ var m = require('mithril'), basePath = "external/codemirror/", pjson = require("./package.json"); // Here we have a few fixes to make CM work in node - we only setup each, // if they don't already exist, otherwise we would override the browser global.document = global.document || {}; global.document.createElement = global.document.createElement || function(){ return { setAttribute: function(){} }; }; global.window = global.window || {}; global.window.getSelection = global.window.getSelection || function(){ return false; }; global.navigator = global.navigator || {}; global.navigator.userAgent = global.navigator.userAgent || "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36"; // Grab code mirror and the javascript language // Note: you cannot dynamically require with browserify, // so we must get whatever modes we need here. // If you need other languages, simply equire them statically in your program. var CodeMirror = require('codemirror'); require("codemirror/mode/javascript/javascript.js"); require("codemirror/mode/htmlmixed/htmlmixed.js"); require("codemirror/mode/css/css.js"); // Our component var CodemirrorComponent = { // Returns a textarea view: function(ctrl, attrs) { return m("div", [ // It is ok to include CSS here - the browser will cache it, // though a more ideal setup would be the ability to load only // once when required. 
m("LINK", { href: basePath + "lib/codemirror.css", rel: "stylesheet"}), m("textarea", {config: CodemirrorComponent.config(attrs)}, attrs.value()) ]); }, config: function(attrs) { return function(element, isInitialized) { if(typeof CodeMirror !== 'undefined') { if (!isInitialized) { var editor = CodeMirror.fromTextArea(element, { lineNumbers: true }); editor.on("change", function(instance, object) { m.startComputation(); attrs.value(editor.doc.getValue()); if (typeof attrs.onchange == "function"){ attrs.onchange(instance, object); } m.endComputation();<|fim▁hole|> } }; } }; // Allow the user to pass in arguments when loading. module.exports = function(args){ if(args && args.basePath) { basePath = args.basePath; } return CodemirrorComponent; };<|fim▁end|>
}); } } else { console.warn('ERROR: You need Codemirror in the page');
<|file_name|>init_models.py<|end_file_name|><|fim▁begin|><|fim▁hole|> from .pagemodels import * from .catalogmodels import * from .utilmodels import * from .usermodels import * from .dbconnect import Base, engine def init_models(): Base.metadata.create_all(engine)<|fim▁end|>
# -*- coding: utf-8 -*-
<|file_name|>_symmetric.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators class SymmetricValidator(_plotly_utils.basevalidators.BooleanValidator): def __init__(<|fim▁hole|> super(SymmetricValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "calc"), role=kwargs.pop("role", "info"), **kwargs )<|fim▁end|>
self, plotly_name="symmetric", parent_name="scatter3d.error_y", **kwargs ):
<|file_name|>06.py<|end_file_name|><|fim▁begin|># THREE GOLD STARS # Sudoku [http://en.wikipedia.org/wiki/Sudoku] # is a logic puzzle where a game # is defined by a partially filled # 9 x 9 square of digits where each square # contains one of the digits 1,2,3,4,5,6,7,8,9. # For this question we will generalize # and simplify the game. # Define a procedure, check_sudoku, # that takes as input a square list # of lists representing an n x n # sudoku puzzle solution and returns the boolean # True if the input is a valid # sudoku square and returns the boolean False # otherwise. # A valid sudoku square satisfies these # two properties: # 1. Each column of the square contains # each of the whole numbers from 1 to n exactly once. # 2. Each row of the square contains each # of the whole numbers from 1 to n exactly once. # You may assume the the input is square and contains at # least one row and column. correct = [[1,2,3], [2,3,1], [3,1,2]] incorrect = [[1,2,3,4], [2,3,1,3], [3,1,2,3], [4,4,4,4]] incorrect2 = [[1,2,3,4], [2,3,1,4], [4,1,2,3], [3,4,1,2]] incorrect3 = [[1,2,3,4,5], [2,3,1,5,6],<|fim▁hole|>incorrect4 = [['a','b','c'], ['b','c','a'], ['c','a','b']] incorrect5 = [ [1, 1.5], [1.5, 1]] def check_sudoku(grid): for ii, row in enumerate(grid): column = [] for jj in range(len(row)): column.append(grid[jj][ii]) for jj in range(len(grid)): if jj+1 not in row or jj+1 not in column: return False return True print check_sudoku(incorrect) #>>> False print check_sudoku(correct) #>>> True print check_sudoku(incorrect2) #>>> False print check_sudoku(incorrect3) #>>> False print check_sudoku(incorrect4) #>>> False print check_sudoku(incorrect5) #>>> False<|fim▁end|>
[4,5,2,1,3], [3,4,5,2,1], [5,6,4,3,2]]
<|file_name|>into_iter.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::option::IntoIter; // #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)] // #[stable(feature = "rust1", since = "1.0.0")] // pub enum Option<T> { // /// No value // #[stable(feature = "rust1", since = "1.0.0")] // None, // /// Some value `T` // #[stable(feature = "rust1", since = "1.0.0")] // Some(T) // } // impl<T> IntoIterator for Option<T> { // type Item = T; // type IntoIter = IntoIter<T>; // // /// Returns a consuming iterator over the possibly contained value. // /// // /// # Examples // /// // /// ``` // /// let x = Some("string"); // /// let v: Vec<&str> = x.into_iter().collect(); // /// assert_eq!(v, ["string"]); // /// // /// let x = None; // /// let v: Vec<&str> = x.into_iter().collect(); // /// assert!(v.is_empty()); // /// ``` // #[inline] // fn into_iter(self) -> IntoIter<T> { // IntoIter { inner: Item { opt: self } } // } // } type T = &'static str; #[test] fn into_iter_test1() { let x: Option<T> = Some::<T>("string"); let mut into_iter: IntoIter<T> = x.into_iter(); let result: Option<T> = into_iter.next(); match result { Some(v) => assert_eq!(v, "string"), None => assert!(false) } }<|fim▁hole|> let mut n: usize = 0; for v in x { n += 1; assert_eq!(v, "string"); } assert_eq!(n, 1); } #[test] fn into_iter_test3() { let x: Option<T> = None::<T>; let mut into_iter: IntoIter<T> = x.into_iter(); let result: Option<T> = into_iter.next(); match result { Some(_) => assert!(false), None => assert!(true) } } #[test] fn into_iter_test4() { let x: Option<T> = None::<T>; let mut n: usize = 0; for v in x { n += 1; assert_eq!(v, "string"); } assert_eq!(n, 0); } }<|fim▁end|>
#[test] fn into_iter_test2() { let x: Option<T> = Some::<T>("string");
<|file_name|>CopyRange.js<|end_file_name|><|fim▁begin|>/* * @brief ajax * * @file Group.js * * Copyright (C) 2006-2009 Jedox AG * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License (Version 2) as published * by the Free Software Foundation at http://www.gnu.org/copyleft/gpl.html. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along with * this program; if not, write to the Free Software Foundation, Inc., 59 Temple * Place, Suite 330, Boston, MA 02111-1307 USA * * You may obtain a copy of the License at * * <a href="http://www.jedox.com/license_palo_bi_suite.txt"> * http://www.jedox.com/license_palo_bi_suite.txt * </a> * * If you are developing and distributing open source applications under the * GPL License, then you are free to use Palo under the GPL License. For OEMs, * ISVs, and VARs who distribute Palo with their products, and do not license * and distribute their source code under the GPL, Jedox provides a flexible * OEM Commercial License. * * \author * Drazen Kljajic <[email protected]> * * \version * SVN: $Id: CopyRange.js 4776 2011-03-28 14:25:45Z predragm $ * */ Jedox.wss.grid.CopyRange = (function () { // private static fields <|fim▁hole|> return function (selection, startPoint, endPoint) { Jedox.wss.grid.CopyRange.parent.constructor.call(this, selection, startPoint, endPoint); // private fields // private methods // public fields // privileged methods // constructor code var that = this, panesLen = this._panes.length, htmlEl, htmlElCp; // Init presentation. 
// Add html elements for each line in an range: for(var clsName = 'formularRangeBorder', i = 3; i >= 0; --i) { htmlEl = document.createElement('div'); htmlEl.className = clsName; for (var j = panesLen - 1; j >= 0; j--) { htmlElCp = j > 0 ? htmlEl.cloneNode(true) : htmlEl; this._edgeElems[j][i] = htmlElCp; this._containers[j].appendChild(htmlElCp); } } } } )(); // CopyRange extends Range Jedox.util.extend(Jedox.wss.grid.CopyRange, Jedox.wss.grid.Range); // public static methods // public methods clsRef = Jedox.wss.grid.CopyRange; clsRef = null;<|fim▁end|>
// private static methods // class constructor
<|file_name|>0002_person_token.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-03-26 10:54 from __future__ import unicode_literals from django.db import migrations, models import uuid class Migration(migrations.Migration): dependencies = [ ('news', '0001_initial'), ] operations = [ migrations.AddField( model_name='person', name='token', field=models.UUIDField(default=uuid.uuid4, editable=False, null=True),<|fim▁hole|><|fim▁end|>
), ]
<|file_name|>ForceFocusHook.ts<|end_file_name|><|fim▁begin|>import {Ref, useEffect, useRef} from 'react' export function useForceFocus<T extends HTMLElement>(): Ref<T> { const el = useRef<T>(null) <|fim▁hole|> } }, []) return el }<|fim▁end|>
useEffect(() => { if (el.current) { el.current.focus()
<|file_name|>tool_extrude.py<|end_file_name|><|fim▁begin|># tool_extrude.py # Extrusion tool. # Copyright (c) 2015, Lennart Riecken # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from PySide import QtGui, QtCore from tool import Tool, EventData, MouseButtons, KeyModifiers, Face from plugin_api import register_plugin class ExtrudeTool(Tool): def __init__(self, api): super(ExtrudeTool, self).__init__(api) # Create our action / icon self.action = QtGui.QAction(QtGui.QPixmap(":/images/gfx/icons/border-bottom-thick.png"), "Extrude", None) self.action.setStatusTip("Extude region") self.action.setCheckable(True) self.action.setShortcut(QtGui.QKeySequence("Ctrl+0")) # Register the tool self.priority = 10 self.api.register_tool(self) # Area tool helper self._mouse = None self._stamp = [] self.xdir = True self.ydir = True self.zdir = True self.pastoffset = 0 self.fixeddirection = False def drawstamp(self, data, dx, dy, dz): for x, y, z, col in self._stamp: tgt = data.voxels.get(x + dx, y + dy, z + dz) if tgt == 0: data.voxels.set(x + dx, y + dy, z + dz, col, True, 1) data.voxels.completeUndoFill() def on_drag_start(self, data): if len(data.voxels._selection) > 0: self._stamp = [] for x, y, z in data.voxels._selection: col = data.voxels.get(x, y, z) self._stamp.append((x, y, z, col)) self._mouse = (data.mouse_x, data.mouse_y) if QtCore.Qt.Key_X in data.keys: self.xdir = True self.ydir = False 
self.zdir = False self.fixeddirection = True elif QtCore.Qt.Key_Y in data.keys: self.xdir = False self.ydir = True self.zdir = False self.fixeddirection = True elif QtCore.Qt.Key_Z in data.keys: self.xdir = False self.ydir = False self.zdir = True self.fixeddirection = True else: self.xdir = True self.ydir = True self.zdir = True self.fixeddirection = False self.pastoffset = 0 # When dragging, create the selection def on_drag(self, data): # In case the first click has missed a valid target. if self._mouse is None or len(self._stamp) == 0: return dx = data.mouse_x - self._mouse[0] dy = data.mouse_y - self._mouse[1] # Work out some sort of vague translation between screen and voxels sx = self.api.mainwindow.width() / data.voxels.width sy = self.api.mainwindow.height() / data.voxels.height dx = int(round(dx / float(sx))) dy = int(round(dy / float(sy))) if dx == 0 and dy == 0: return # Work out translation for x,y ax, ay = self.api.mainwindow.display.view_axis() tx = 0 ty = 0 tz = 0 tdx = 0 tdy = 0 tdz = 0 if ax == self.api.mainwindow.display.X_AXIS: tdx = dx if dx > 0: tx = 1 elif dx < 0: tx = -1 elif ax == self.api.mainwindow.display.Y_AXIS: tdy = dx if dx > 0:<|fim▁hole|> tdz = dx if dx > 0: tz = 1 elif dx < 0: tz = -1 if ay == self.api.mainwindow.display.X_AXIS: tdx = dy if dy > 0: tx = 1 elif dy < 0: tx = -1 elif ay == self.api.mainwindow.display.Y_AXIS: tdy = dy if dy > 0: ty = -1 elif dy < 0: ty = 1 elif ay == self.api.mainwindow.display.Z_AXIS: tdz = dy if dy > 0: tz = 1 elif dy < 0: tz = -1 if self.fixeddirection: if self.xdir: if tx != 0: self._mouse = (data.mouse_x, data.mouse_y) self.pastoffset += tx self.drawstamp(data, self.pastoffset, 0, 0) elif self.ydir: if ty != 0: self._mouse = (data.mouse_x, data.mouse_y) self.pastoffset += ty self.drawstamp(data, 0, self.pastoffset, 0) elif self.zdir: if tz != 0: self._mouse = (data.mouse_x, data.mouse_y) self.pastoffset += tz self.drawstamp(data, 0, 0, self.pastoffset) else: if tx != 0 and self.xdir and (not 
self.ydir or (abs(tdx) > abs(tdy) and abs(tdx) > abs(tdz))): self._mouse = (data.mouse_x, data.mouse_y) self.ydir = False self.zdir = False self.pastoffset += tx self.drawstamp(data, self.pastoffset, 0, 0) elif ty != 0 and self.ydir and (not self.zdir or abs(tdy) > abs(tdz)): self._mouse = (data.mouse_x, data.mouse_y) self.xdir = False self.zdir = False self.pastoffset += ty self.drawstamp(data, 0, self.pastoffset, 0) elif tz != 0 and self.zdir: self._mouse = (data.mouse_x, data.mouse_y) self.xdir = False self.ydir = False self.pastoffset += tz self.drawstamp(data, 0, 0, self.pastoffset) def on_drag_end(self, data): data.voxels.clear_selection() dx = self.pastoffset if self.xdir else 0 dy = self.pastoffset if self.ydir else 0 dz = self.pastoffset if self.zdir else 0 for x, y, z, col in self._stamp: data.voxels.select(x + dx, y + dy, z + dz) register_plugin(ExtrudeTool, "Extrude Tool", "1.0")<|fim▁end|>
ty = 1 elif dx < 0: ty = -1 elif ax == self.api.mainwindow.display.Z_AXIS:
<|file_name|>ringbuf.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A double-ended queue implemented as a circular buffer. //! //! `RingBuf` implements the trait `Deque`. It should be imported with //! `use collections::Deque`. use core::prelude::*; use core::cmp; use core::default::Default; use core::fmt; use core::iter; use std::hash::{Writer, Hash}; use {Deque, Mutable, MutableSeq}; use vec::Vec; static INITIAL_CAPACITY: uint = 8u; // 2^3 static MINIMUM_CAPACITY: uint = 2u; /// `RingBuf` is a circular buffer that implements `Deque`. #[deriving(Clone)] pub struct RingBuf<T> { nelts: uint, lo: uint, elts: Vec<Option<T>> } impl<T> Collection for RingBuf<T> { /// Returns the number of elements in the `RingBuf`. fn len(&self) -> uint { self.nelts } } impl<T> Mutable for RingBuf<T> { /// Clears the `RingBuf`, removing all values. fn clear(&mut self) { for x in self.elts.mut_iter() { *x = None } self.nelts = 0; self.lo = 0; } } impl<T> Deque<T> for RingBuf<T> { /// Returns a reference to the first element in the `RingBuf`. fn front<'a>(&'a self) -> Option<&'a T> { if self.nelts > 0 { Some(&self[0]) } else { None } } /// Returns a mutable reference to the first element in the `RingBuf`. fn front_mut<'a>(&'a mut self) -> Option<&'a mut T> { if self.nelts > 0 { Some(self.get_mut(0)) } else { None } } /// Returns a reference to the last element in the `RingBuf`. 
fn back<'a>(&'a self) -> Option<&'a T> { if self.nelts > 0 { Some(&self[self.nelts - 1]) } else { None } } /// Returns a mutable reference to the last element in the `RingBuf`. fn back_mut<'a>(&'a mut self) -> Option<&'a mut T> { let nelts = self.nelts; if nelts > 0 { Some(self.get_mut(nelts - 1)) } else { None } } /// Removes and returns the first element in the `RingBuf`, or `None` if it /// is empty. fn pop_front(&mut self) -> Option<T> { let result = self.elts.get_mut(self.lo).take(); if result.is_some() { self.lo = (self.lo + 1u) % self.elts.len(); self.nelts -= 1u; } result } /// Prepends an element to the `RingBuf`. fn push_front(&mut self, t: T) { if self.nelts == self.elts.len() { grow(self.nelts, &mut self.lo, &mut self.elts); } if self.lo == 0u { self.lo = self.elts.len() - 1u; } else { self.lo -= 1u; } *self.elts.get_mut(self.lo) = Some(t); self.nelts += 1u; } } impl<T> MutableSeq<T> for RingBuf<T> { fn push(&mut self, t: T) { if self.nelts == self.elts.len() { grow(self.nelts, &mut self.lo, &mut self.elts); } let hi = self.raw_index(self.nelts); *self.elts.get_mut(hi) = Some(t); self.nelts += 1u; } fn pop(&mut self) -> Option<T> { if self.nelts > 0 { self.nelts -= 1; let hi = self.raw_index(self.nelts); self.elts.get_mut(hi).take() } else { None } } } impl<T> Default for RingBuf<T> { #[inline] fn default() -> RingBuf<T> { RingBuf::new() } } impl<T> RingBuf<T> { /// Creates an empty `RingBuf`. pub fn new() -> RingBuf<T> { RingBuf::with_capacity(INITIAL_CAPACITY) } /// Creates an empty `RingBuf` with space for at least `n` elements. pub fn with_capacity(n: uint) -> RingBuf<T> { RingBuf{nelts: 0, lo: 0, elts: Vec::from_fn(cmp::max(MINIMUM_CAPACITY, n), |_| None)} } /// Retrieva an element in the `RingBuf` by index. /// /// Fails if there is no element with the given index. 
/// /// # Example /// /// ```rust /// #![allow(deprecated)] /// /// use std::collections::RingBuf; /// /// let mut buf = RingBuf::new(); /// buf.push(3i); /// buf.push(4); /// buf.push(5); /// assert_eq!(buf.get(1), &4); /// ``` #[deprecated = "prefer using indexing, e.g., ringbuf[0]"] pub fn get<'a>(&'a self, i: uint) -> &'a T { let idx = self.raw_index(i); match self.elts[idx] { None => fail!(), Some(ref v) => v } } /// Retrieves an element in the `RingBuf` by index. /// /// Fails if there is no element with the given index. /// /// # Example /// /// ```rust /// use std::collections::RingBuf; /// /// let mut buf = RingBuf::new(); /// buf.push(3i); /// buf.push(4); /// buf.push(5); /// *buf.get_mut(1) = 7; /// assert_eq!(buf[1], 7); /// ``` pub fn get_mut<'a>(&'a mut self, i: uint) -> &'a mut T { let idx = self.raw_index(i); match *self.elts.get_mut(idx) { None => fail!(), Some(ref mut v) => v } } /// Swaps elements at indices `i` and `j`. /// /// `i` and `j` may be equal. /// /// Fails if there is no element with either index. /// /// # Example /// /// ```rust /// use std::collections::RingBuf; /// /// let mut buf = RingBuf::new(); /// buf.push(3i); /// buf.push(4); /// buf.push(5); /// buf.swap(0, 2); /// assert_eq!(buf[0], 5); /// assert_eq!(buf[2], 3); /// ``` pub fn swap(&mut self, i: uint, j: uint) { assert!(i < self.len()); assert!(j < self.len()); let ri = self.raw_index(i); let rj = self.raw_index(j); self.elts.as_mut_slice().swap(ri, rj); } /// Returns the index in the underlying `Vec` for a given logical element /// index. fn raw_index(&self, idx: uint) -> uint { raw_index(self.lo, self.elts.len(), idx) } /// Reserves capacity for exactly `n` elements in the given `RingBuf`, /// doing nothing if `self`'s capacity is already equal to or greater /// than the requested capacity. 
pub fn reserve_exact(&mut self, n: uint) { self.elts.reserve_exact(n); } /// Reserves capacity for at least `n` elements in the given `RingBuf`, /// over-allocating in case the caller needs to reserve additional /// space. /// /// Do nothing if `self`'s capacity is already equal to or greater /// than the requested capacity. pub fn reserve(&mut self, n: uint) { self.elts.reserve(n); } /// Returns a front-to-back iterator. /// /// # Example /// /// ```rust /// use std::collections::RingBuf; /// /// let mut buf = RingBuf::new(); /// buf.push(5i); /// buf.push(3); /// buf.push(4); /// let b: &[_] = &[&5, &3, &4]; /// assert_eq!(buf.iter().collect::<Vec<&int>>().as_slice(), b); /// ``` pub fn iter<'a>(&'a self) -> Items<'a, T> { Items{index: 0, rindex: self.nelts, lo: self.lo, elts: self.elts.as_slice()} } /// Returns a front-to-back iterator which returns mutable references. /// /// # Example /// /// ```rust /// use std::collections::RingBuf; /// /// let mut buf = RingBuf::new(); /// buf.push(5i); /// buf.push(3); /// buf.push(4); /// for num in buf.mut_iter() { /// *num = *num - 2; /// } /// let b: &[_] = &[&mut 3, &mut 1, &mut 2]; /// assert_eq!(buf.mut_iter().collect::<Vec<&mut int>>().as_slice(), b); /// ``` pub fn mut_iter<'a>(&'a mut self) -> MutItems<'a, T> { let start_index = raw_index(self.lo, self.elts.len(), 0); let end_index = raw_index(self.lo, self.elts.len(), self.nelts); // Divide up the array if end_index <= start_index { // Items to iterate goes from: // start_index to self.elts.len() // and then // 0 to end_index let (temp, remaining1) = self.elts.mut_split_at(start_index); let (remaining2, _) = temp.mut_split_at(end_index); MutItems { remaining1: remaining1, remaining2: remaining2, nelts: self.nelts } } else { // Items to iterate goes from start_index to end_index: let (empty, elts) = self.elts.mut_split_at(0); let remaining1 = elts.mut_slice(start_index, end_index); MutItems { remaining1: remaining1, remaining2: empty, nelts: self.nelts } } } } 
/// `RingBuf` iterator. pub struct Items<'a, T> { lo: uint, index: uint, rindex: uint, elts: &'a [Option<T>], } impl<'a, T> Iterator<&'a T> for Items<'a, T> { #[inline] fn next(&mut self) -> Option<&'a T> { if self.index == self.rindex { return None; } let raw_index = raw_index(self.lo, self.elts.len(), self.index); self.index += 1; Some(self.elts[raw_index].get_ref()) } #[inline] fn size_hint(&self) -> (uint, Option<uint>) { let len = self.rindex - self.index; (len, Some(len)) } } impl<'a, T> DoubleEndedIterator<&'a T> for Items<'a, T> { #[inline] fn next_back(&mut self) -> Option<&'a T> { if self.index == self.rindex { return None; } self.rindex -= 1; let raw_index = raw_index(self.lo, self.elts.len(), self.rindex); Some(self.elts[raw_index].get_ref()) } } impl<'a, T> ExactSize<&'a T> for Items<'a, T> {} impl<'a, T> RandomAccessIterator<&'a T> for Items<'a, T> { #[inline] fn indexable(&self) -> uint { self.rindex - self.index } #[inline] fn idx(&mut self, j: uint) -> Option<&'a T> { if j >= self.indexable() { None } else { let raw_index = raw_index(self.lo, self.elts.len(), self.index + j); Some(self.elts[raw_index].get_ref()) } } } /// `RingBuf` mutable iterator. 
pub struct MutItems<'a, T> { remaining1: &'a mut [Option<T>], remaining2: &'a mut [Option<T>], nelts: uint, } impl<'a, T> Iterator<&'a mut T> for MutItems<'a, T> { #[inline] #[allow(deprecated)] // mut_shift_ref fn next(&mut self) -> Option<&'a mut T> { if self.nelts == 0 { return None; } let r = if self.remaining1.len() > 0 { &mut self.remaining1 } else { assert!(self.remaining2.len() > 0); &mut self.remaining2 }; self.nelts -= 1; Some(r.mut_shift_ref().unwrap().get_mut_ref()) } #[inline] fn size_hint(&self) -> (uint, Option<uint>) { (self.nelts, Some(self.nelts)) } } impl<'a, T> DoubleEndedIterator<&'a mut T> for MutItems<'a, T> { #[inline] #[allow(deprecated)] // mut_shift_ref fn next_back(&mut self) -> Option<&'a mut T> { if self.nelts == 0 { return None; } let r = if self.remaining2.len() > 0 { &mut self.remaining2 } else { assert!(self.remaining1.len() > 0); &mut self.remaining1 }; self.nelts -= 1; Some(r.mut_pop_ref().unwrap().get_mut_ref()) } } impl<'a, T> ExactSize<&'a mut T> for MutItems<'a, T> {} /// Grow is only called on full elts, so nelts is also len(elts), unlike /// elsewhere. fn grow<T>(nelts: uint, loptr: &mut uint, elts: &mut Vec<Option<T>>) { assert_eq!(nelts, elts.len()); let lo = *loptr; elts.reserve(nelts * 2); let newlen = elts.capacity(); /* fill with None */ for _ in range(elts.len(), newlen) { elts.push(None); } /* Move the shortest half into the newly reserved area. lo ---->| nelts ----------->| [o o o|o o o o o] A [. . .|o o o o o o o o|. . . . .] B [o o o|. . . . . . . .|o o o o o] */ assert!(newlen - nelts/2 >= nelts); if lo <= (nelts - lo) { // A for i in range(0u, lo) { elts.as_mut_slice().swap(i, nelts + i); } } else { // B for i in range(lo, nelts) { elts.as_mut_slice().swap(i, newlen - nelts + i); } *loptr += newlen - nelts; } } /// Returns the index in the underlying `Vec` for a given logical element index. 
fn raw_index(lo: uint, len: uint, index: uint) -> uint { if lo >= len - index { lo + index - len } else { lo + index } } impl<A: PartialEq> PartialEq for RingBuf<A> { fn eq(&self, other: &RingBuf<A>) -> bool { self.nelts == other.nelts && self.iter().zip(other.iter()).all(|(a, b)| a.eq(b)) } fn ne(&self, other: &RingBuf<A>) -> bool { !self.eq(other) } } impl<A: Eq> Eq for RingBuf<A> {} impl<A: PartialOrd> PartialOrd for RingBuf<A> { fn partial_cmp(&self, other: &RingBuf<A>) -> Option<Ordering> { iter::order::partial_cmp(self.iter(), other.iter()) } } impl<A: Ord> Ord for RingBuf<A> { #[inline] fn cmp(&self, other: &RingBuf<A>) -> Ordering { iter::order::cmp(self.iter(), other.iter()) } } impl<S: Writer, A: Hash<S>> Hash<S> for RingBuf<A> { fn hash(&self, state: &mut S) { self.len().hash(state); for elt in self.iter() { elt.hash(state); } } } impl<A> Index<uint, A> for RingBuf<A> { #[inline] #[allow(deprecated)] fn index<'a>(&'a self, i: &uint) -> &'a A { self.get(*i) } } // FIXME(#12825) Indexing will always try IndexMut first and that causes issues. 
/*impl<A> IndexMut<uint, A> for RingBuf<A> { #[inline] fn index_mut<'a>(&'a mut self, index: &uint) -> &'a mut A { self.get_mut(*index) } }*/ impl<A> FromIterator<A> for RingBuf<A> { fn from_iter<T: Iterator<A>>(iterator: T) -> RingBuf<A> { let (lower, _) = iterator.size_hint(); let mut deq = RingBuf::with_capacity(lower); deq.extend(iterator); deq } } impl<A> Extendable<A> for RingBuf<A> { fn extend<T: Iterator<A>>(&mut self, mut iterator: T) { for elt in iterator { self.push(elt); } } } impl<T: fmt::Show> fmt::Show for RingBuf<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "[")); for (i, e) in self.iter().enumerate() { if i != 0 { try!(write!(f, ", ")); } try!(write!(f, "{}", *e)); } write!(f, "]") } } #[cfg(test)] mod tests { use std::fmt::Show; use std::prelude::*; use std::gc::{GC, Gc}; use std::hash; use test::Bencher; use test; use {Deque, Mutable, MutableSeq}; use super::RingBuf; use vec::Vec; #[test] fn test_simple() { let mut d = RingBuf::new(); assert_eq!(d.len(), 0u); d.push_front(17i); d.push_front(42i); d.push_back(137); assert_eq!(d.len(), 3u); d.push_back(137); assert_eq!(d.len(), 4u); debug!("{:?}", d.front()); assert_eq!(*d.front().unwrap(), 42); debug!("{:?}", d.back()); assert_eq!(*d.back().unwrap(), 137); let mut i = d.pop_front(); debug!("{:?}", i); assert_eq!(i, Some(42)); i = d.pop_back(); debug!("{:?}", i); assert_eq!(i, Some(137)); i = d.pop_back(); debug!("{:?}", i); assert_eq!(i, Some(137)); i = d.pop_back(); debug!("{:?}", i); assert_eq!(i, Some(17)); assert_eq!(d.len(), 0u); d.push_back(3); assert_eq!(d.len(), 1u); d.push_front(2); assert_eq!(d.len(), 2u); d.push_back(4); assert_eq!(d.len(), 3u); d.push_front(1); assert_eq!(d.len(), 4u); debug!("{:?}", d.get(0)); debug!("{:?}", d.get(1)); debug!("{:?}", d.get(2)); debug!("{:?}", d.get(3)); assert_eq!(*d.get(0), 1); assert_eq!(*d.get(1), 2); assert_eq!(*d.get(2), 3); assert_eq!(*d.get(3), 4); } #[test] fn test_boxes() { let a: Gc<int> = box(GC) 5; let b: 
Gc<int> = box(GC) 72; let c: Gc<int> = box(GC) 64; let d: Gc<int> = box(GC) 175; let mut deq = RingBuf::new(); assert_eq!(deq.len(), 0); deq.push_front(a); deq.push_front(b); deq.push_back(c); assert_eq!(deq.len(), 3); deq.push_back(d); assert_eq!(deq.len(), 4); assert_eq!(deq.front(), Some(&b)); assert_eq!(deq.back(), Some(&d)); assert_eq!(deq.pop_front(), Some(b)); assert_eq!(deq.pop_back(), Some(d)); assert_eq!(deq.pop_back(), Some(c)); assert_eq!(deq.pop_back(), Some(a)); assert_eq!(deq.len(), 0); deq.push_back(c); assert_eq!(deq.len(), 1); deq.push_front(b); assert_eq!(deq.len(), 2); deq.push_back(d); assert_eq!(deq.len(), 3); deq.push_front(a); assert_eq!(deq.len(), 4); assert_eq!(*deq.get(0), a); assert_eq!(*deq.get(1), b); assert_eq!(*deq.get(2), c); assert_eq!(*deq.get(3), d); } #[cfg(test)] fn test_parameterized<T:Clone + PartialEq + Show>(a: T, b: T, c: T, d: T) { let mut deq = RingBuf::new(); assert_eq!(deq.len(), 0); deq.push_front(a.clone()); deq.push_front(b.clone()); deq.push_back(c.clone()); assert_eq!(deq.len(), 3); deq.push_back(d.clone()); assert_eq!(deq.len(), 4); assert_eq!((*deq.front().unwrap()).clone(), b.clone()); assert_eq!((*deq.back().unwrap()).clone(), d.clone()); assert_eq!(deq.pop_front().unwrap(), b.clone()); assert_eq!(deq.pop_back().unwrap(), d.clone()); assert_eq!(deq.pop_back().unwrap(), c.clone()); assert_eq!(deq.pop_back().unwrap(), a.clone()); assert_eq!(deq.len(), 0); deq.push_back(c.clone()); assert_eq!(deq.len(), 1); deq.push_front(b.clone()); assert_eq!(deq.len(), 2); deq.push_back(d.clone()); assert_eq!(deq.len(), 3); deq.push_front(a.clone()); assert_eq!(deq.len(), 4); assert_eq!((*deq.get(0)).clone(), a.clone()); assert_eq!((*deq.get(1)).clone(), b.clone()); assert_eq!((*deq.get(2)).clone(), c.clone()); assert_eq!((*deq.get(3)).clone(), d.clone()); } #[test] fn test_push_front_grow() { let mut deq = RingBuf::new(); for i in range(0u, 66) { deq.push_front(i); } assert_eq!(deq.len(), 66); for i in range(0u, 66) { 
assert_eq!(*deq.get(i), 65 - i); } let mut deq = RingBuf::new(); for i in range(0u, 66) { deq.push_back(i); } for i in range(0u, 66) { assert_eq!(*deq.get(i), i); } } #[test] fn test_index() { let mut deq = RingBuf::new(); for i in range(1u, 4) { deq.push_front(i); } assert_eq!(deq[1], 2); } #[test] #[should_fail] fn test_index_out_of_bounds() { let mut deq = RingBuf::new(); for i in range(1u, 4) { deq.push_front(i); } deq[3]; } #[bench] fn bench_new(b: &mut test::Bencher) { b.iter(|| { let _: RingBuf<u64> = RingBuf::new(); }) } #[bench] fn bench_push_back(b: &mut test::Bencher) { let mut deq = RingBuf::new(); b.iter(|| { deq.push_back(0i); }) } #[bench] fn bench_push_front(b: &mut test::Bencher) { let mut deq = RingBuf::new(); b.iter(|| { deq.push_front(0i); }) } #[bench] fn bench_grow(b: &mut test::Bencher) { let mut deq = RingBuf::new(); b.iter(|| { for _ in range(0i, 65) { deq.push_front(1i); } }) } #[deriving(Clone, PartialEq, Show)] enum Taggy { One(int), Two(int, int), Three(int, int, int), } #[deriving(Clone, PartialEq, Show)] enum Taggypar<T> { Onepar(int), Twopar(int, int), Threepar(int, int, int), } #[deriving(Clone, PartialEq, Show)] struct RecCy { x: int, y: int, t: Taggy } #[test] fn test_param_int() { test_parameterized::<int>(5, 72, 64, 175); } #[test] fn test_param_at_int() { test_parameterized::<Gc<int>>(box(GC) 5, box(GC) 72, box(GC) 64, box(GC) 175); } #[test] fn test_param_taggy() { test_parameterized::<Taggy>(One(1), Two(1, 2), Three(1, 2, 3), Two(17, 42)); } #[test] fn test_param_taggypar() { test_parameterized::<Taggypar<int>>(Onepar::<int>(1), Twopar::<int>(1, 2), Threepar::<int>(1, 2, 3), Twopar::<int>(17, 42)); } #[test]<|fim▁hole|> let reccy3 = RecCy { x: 1, y: 777, t: Three(1, 2, 3) }; let reccy4 = RecCy { x: 19, y: 252, t: Two(17, 42) }; test_parameterized::<RecCy>(reccy1, reccy2, reccy3, reccy4); } #[test] fn test_with_capacity() { let mut d = RingBuf::with_capacity(0); d.push_back(1i); assert_eq!(d.len(), 1); let mut d = 
RingBuf::with_capacity(50); d.push_back(1i); assert_eq!(d.len(), 1); } #[test] fn test_with_capacity_non_power_two() { let mut d3 = RingBuf::with_capacity(3); d3.push(1i); // X = None, | = lo // [|1, X, X] assert_eq!(d3.pop_front(), Some(1)); // [X, |X, X] assert_eq!(d3.front(), None); // [X, |3, X] d3.push(3); // [X, |3, 6] d3.push(6); // [X, X, |6] assert_eq!(d3.pop_front(), Some(3)); // Pushing the lo past half way point to trigger // the 'B' scenario for growth // [9, X, |6] d3.push(9); // [9, 12, |6] d3.push(12); d3.push(15); // There used to be a bug here about how the // RingBuf made growth assumptions about the // underlying Vec which didn't hold and lead // to corruption. // (Vec grows to next power of two) //good- [9, 12, 15, X, X, X, X, |6] //bug- [15, 12, X, X, X, |6, X, X] assert_eq!(d3.pop_front(), Some(6)); // Which leads us to the following state which // would be a failure case. //bug- [15, 12, X, X, X, X, |X, X] assert_eq!(d3.front(), Some(&9)); } #[test] fn test_reserve_exact() { let mut d = RingBuf::new(); d.push_back(0u64); d.reserve_exact(50); assert_eq!(d.elts.capacity(), 50); let mut d = RingBuf::new(); d.push_back(0u32); d.reserve_exact(50); assert_eq!(d.elts.capacity(), 50); } #[test] fn test_reserve() { let mut d = RingBuf::new(); d.push_back(0u64); d.reserve(50); assert_eq!(d.elts.capacity(), 64); let mut d = RingBuf::new(); d.push_back(0u32); d.reserve(50); assert_eq!(d.elts.capacity(), 64); } #[test] fn test_swap() { let mut d: RingBuf<int> = range(0i, 5).collect(); d.pop_front(); d.swap(0, 3); assert_eq!(d.iter().map(|&x|x).collect::<Vec<int>>(), vec!(4, 2, 3, 1)); } #[test] fn test_iter() { let mut d = RingBuf::new(); assert_eq!(d.iter().next(), None); assert_eq!(d.iter().size_hint(), (0, Some(0))); for i in range(0i, 5) { d.push_back(i); } { let b: &[_] = &[&0,&1,&2,&3,&4]; assert_eq!(d.iter().collect::<Vec<&int>>().as_slice(), b); } for i in range(6i, 9) { d.push_front(i); } { let b: &[_] = &[&8,&7,&6,&0,&1,&2,&3,&4]; 
assert_eq!(d.iter().collect::<Vec<&int>>().as_slice(), b); } let mut it = d.iter(); let mut len = d.len(); loop { match it.next() { None => break, _ => { len -= 1; assert_eq!(it.size_hint(), (len, Some(len))) } } } } #[test] fn test_rev_iter() { let mut d = RingBuf::new(); assert_eq!(d.iter().rev().next(), None); for i in range(0i, 5) { d.push_back(i); } { let b: &[_] = &[&4,&3,&2,&1,&0]; assert_eq!(d.iter().rev().collect::<Vec<&int>>().as_slice(), b); } for i in range(6i, 9) { d.push_front(i); } let b: &[_] = &[&4,&3,&2,&1,&0,&6,&7,&8]; assert_eq!(d.iter().rev().collect::<Vec<&int>>().as_slice(), b); } #[test] fn test_mut_rev_iter_wrap() { let mut d = RingBuf::with_capacity(3); assert!(d.mut_iter().rev().next().is_none()); d.push_back(1i); d.push_back(2); d.push_back(3); assert_eq!(d.pop_front(), Some(1)); d.push_back(4); assert_eq!(d.mut_iter().rev().map(|x| *x).collect::<Vec<int>>(), vec!(4, 3, 2)); } #[test] fn test_mut_iter() { let mut d = RingBuf::new(); assert!(d.mut_iter().next().is_none()); for i in range(0u, 3) { d.push_front(i); } for (i, elt) in d.mut_iter().enumerate() { assert_eq!(*elt, 2 - i); *elt = i; } { let mut it = d.mut_iter(); assert_eq!(*it.next().unwrap(), 0); assert_eq!(*it.next().unwrap(), 1); assert_eq!(*it.next().unwrap(), 2); assert!(it.next().is_none()); } } #[test] fn test_mut_rev_iter() { let mut d = RingBuf::new(); assert!(d.mut_iter().rev().next().is_none()); for i in range(0u, 3) { d.push_front(i); } for (i, elt) in d.mut_iter().rev().enumerate() { assert_eq!(*elt, i); *elt = i; } { let mut it = d.mut_iter().rev(); assert_eq!(*it.next().unwrap(), 0); assert_eq!(*it.next().unwrap(), 1); assert_eq!(*it.next().unwrap(), 2); assert!(it.next().is_none()); } } #[test] fn test_from_iter() { use std::iter; let v = vec!(1i,2,3,4,5,6,7); let deq: RingBuf<int> = v.iter().map(|&x| x).collect(); let u: Vec<int> = deq.iter().map(|&x| x).collect(); assert_eq!(u, v); let mut seq = iter::count(0u, 2).take(256); let deq: RingBuf<uint> = 
seq.collect(); for (i, &x) in deq.iter().enumerate() { assert_eq!(2*i, x); } assert_eq!(deq.len(), 256); } #[test] fn test_clone() { let mut d = RingBuf::new(); d.push_front(17i); d.push_front(42); d.push_back(137); d.push_back(137); assert_eq!(d.len(), 4u); let mut e = d.clone(); assert_eq!(e.len(), 4u); while !d.is_empty() { assert_eq!(d.pop_back(), e.pop_back()); } assert_eq!(d.len(), 0u); assert_eq!(e.len(), 0u); } #[test] fn test_eq() { let mut d = RingBuf::new(); assert!(d == RingBuf::with_capacity(0)); d.push_front(137i); d.push_front(17); d.push_front(42); d.push_back(137); let mut e = RingBuf::with_capacity(0); e.push_back(42); e.push_back(17); e.push_back(137); e.push_back(137); assert!(&e == &d); e.pop_back(); e.push_back(0); assert!(e != d); e.clear(); assert!(e == RingBuf::new()); } #[test] fn test_hash() { let mut x = RingBuf::new(); let mut y = RingBuf::new(); x.push(1i); x.push(2); x.push(3); y.push(0i); y.push(1i); y.pop_front(); y.push(2); y.push(3); assert!(hash::hash(&x) == hash::hash(&y)); } #[test] fn test_ord() { let x = RingBuf::new(); let mut y = RingBuf::new(); y.push(1i); y.push(2); y.push(3); assert!(x < y); assert!(y > x); assert!(x <= x); assert!(x >= x); } #[test] fn test_show() { let ringbuf: RingBuf<int> = range(0i, 10).collect(); assert!(format!("{}", ringbuf).as_slice() == "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"); let ringbuf: RingBuf<&str> = vec!["just", "one", "test", "more"].iter() .map(|&s| s) .collect(); assert!(format!("{}", ringbuf).as_slice() == "[just, one, test, more]"); } }<|fim▁end|>
fn test_param_reccy() { let reccy1 = RecCy { x: 1, y: 2, t: One(1) }; let reccy2 = RecCy { x: 345, y: 2, t: Two(1, 2) };
<|file_name|>ComponentAddon.java<|end_file_name|><|fim▁begin|>/** * L2FProd.com Common Components 7.3 License. * * Copyright 2005-2007 L2FProd.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.l2fprod.common.swing.plaf; /** * Each new component type of the library will contribute an addon to * the LookAndFeelAddons. A <code>ComponentAddon</code> is the * equivalent of a {@link javax.swing.LookAndFeel}but focused on one * component. <br> * * @author <a href="mailto:[email protected]">Frederic Lavigne</a> */ <|fim▁hole|> * @return the name of this addon */ String getName(); /** * Initializes this addon (i.e register UI classes, colors, fonts, * borders, any UIResource used by the component class). When * initializing, the addon can register different resources based on * the addon or the current look and feel. * * @param addon the current addon */ void initialize(LookAndFeelAddons addon); /** * Uninitializes this addon. * * @param addon */ void uninitialize(LookAndFeelAddons addon); }<|fim▁end|>
public interface ComponentAddon { /**
<|file_name|>create_bidder_level_filter_set.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This example creates a bidder-level filter set. A bidder-level filter set can be used to retrieve aggregated data for all Authorized Buyers accounts under the given bidder account, including the bidder account itself. """ import argparse from datetime import date from datetime import datetime from datetime import timedelta import os import pprint import sys import uuid sys.path.insert(0, os.path.abspath('..')) from googleapiclient.errors import HttpError import samples_util _DATE_FORMAT = '%Y%m%d' _FILTER_SET_NAME_TEMPLATE = ('bidders/{bidders_resource_id}/' 'filterSets/{filtersets_resource_id}') _OWNER_NAME_TEMPLATE = 'bidders/{bidders_resource_id}' _TODAY = date.today() _VALID_ENVIRONMENTS = ('WEB', 'APP') _VALID_FORMATS = ('DISPLAY', 'VIDEO') _VALID_PLATFORMS = ('DESKTOP', 'TABLET', 'MOBILE') _VALID_TIME_SERIES_GRANULARITIES = ('HOURLY', 'DAILY') DEFAULT_BIDDER_RESOURCE_ID = 'ENTER_BIDDER_RESOURCE_ID_HERE' DEFAULT_FILTER_SET_RESOURCE_ID = f'FilterSet_{uuid.uuid4()}' DEFAULT_END_DATE = _TODAY.strftime(_DATE_FORMAT) DEFAULT_START_DATE = (_TODAY - timedelta(days=7)).strftime( _DATE_FORMAT) def main(ad_exchange_buyer, owner_name, body, is_transient): try: # Construct and execute the request. 
filter_set = ad_exchange_buyer.bidders().filterSets().create( ownerName=owner_name, isTransient=is_transient, body=body).execute() print(f'FilterSet created for bidder: "{owner_name}".') pprint.pprint(filter_set) except HttpError as e: print(e) if __name__ == '__main__': def time_series_granularity_type(s): if s not in _VALID_TIME_SERIES_GRANULARITIES: raise argparse.ArgumentTypeError('Invalid TimeSeriesGranularity ' f'specified: "{s}".') return s def environment_type(s): if s not in _VALID_ENVIRONMENTS: raise argparse.ArgumentTypeError( f'Invalid Environment specified: "{s}".') return s def format_type(s): if s not in _VALID_FORMATS: raise argparse.ArgumentTypeError(f'Invalid Format specified: "{s}".') return s <|fim▁hole|> if s not in _VALID_PLATFORMS: raise argparse.ArgumentTypeError(f'Invalid Platform specified: "{s}".') return s def valid_date(s): try: return datetime.strptime(s, _DATE_FORMAT).date() except ValueError: raise argparse.ArgumentTypeError(f'Invalid date specified: "{s}".') parser = argparse.ArgumentParser( description=('Creates a bidder-level filter set with the specified ' 'options.')) # Required fields. parser.add_argument( '-b', '--bidder_resource_id', default=DEFAULT_BIDDER_RESOURCE_ID, help=('The resource ID of the bidders resource for which the filter set ' 'is being created. This will be used to construct the ownerName ' 'used as a path parameter for filter set requests. For additional ' 'information on how to configure the ownerName path parameter, ' 'see: https://developers.google.com/authorized-buyers/apis/' 'reference/rest/v2beta1/bidders.filterSets/create' '#body.PATH_PARAMETERS.owner_name')) parser.add_argument( '-r', '--resource_id', default=DEFAULT_FILTER_SET_RESOURCE_ID, help=('The resource ID of the filter set. Note that this must be ' 'unique. This will be used to construct the filter set\'s name. 
' 'For additional information on how to configure a filter set\'s ' 'name, see: https://developers.google.com/authorized-buyers/apis/' 'reference/rest/v2beta1/bidders.filterSets#FilterSet.FIELDS.name')) parser.add_argument( '--end_date', default=DEFAULT_END_DATE, type=valid_date, help=('The end date for the filter set\'s absoluteDateRange field, which ' 'will be accepted in this example in YYYYMMDD format.')) parser.add_argument( '--start_date', default=DEFAULT_START_DATE, type=valid_date, help=('The start date for the filter set\'s time_range field, which ' 'will be accepted in this example in YYYYMMDD format.')) # Optional fields. parser.add_argument( '-e', '--environment', required=False, type=environment_type, help=('The environment on which to filter.')) parser.add_argument( '-f', '--format', required=False, type=format_type, help=('The format on which to filter.')) parser.add_argument( '-p', '--platforms', required=False, nargs='*', type=platform_type, help=('The platforms on which to filter. The filters represented by ' 'multiple platforms are ORed together. Note that you may specify ' 'more than one using a space as a delimiter.')) parser.add_argument( '-s', '--seller_network_ids', required=False, nargs='*', type=int, help=('The list of IDs for seller networks on which to filter. The ' 'filters represented by multiple seller network IDs are ORed ' 'together. Note that you may specify more than one using a space ' 'as a delimiter.')) parser.add_argument( '-t', '--time_series_granularity', required=False, type=time_series_granularity_type, help=('The granularity of time intervals if a time series breakdown is ' 'desired.')) parser.add_argument( '--is_transient', required=False, default=True, type=bool, help=('Whether the filter set is transient, or should be persisted ' 'indefinitely. In this example, this will default to True.')) args = parser.parse_args() # Build the time_range as an AbsoluteDateRange. 
time_range = { 'startDate': { 'year': args.start_date.year, 'month': args.start_date.month, 'day': args.start_date.day }, 'endDate': { 'year': args.end_date.year, 'month': args.end_date.month, 'day': args.end_date.day } } # Create a body containing the required fields. BODY = { 'name': _FILTER_SET_NAME_TEMPLATE.format( bidders_resource_id=args.bidder_resource_id, filtersets_resource_id=args.resource_id), # Note: You may alternatively specify relativeDateRange or # realtimeTimeRange. 'absoluteDateRange': time_range } # Add optional fields to body if specified. if args.environment: BODY['environment'] = args.environment if args.format: BODY['format'] = args.format if args.platforms: BODY['platforms'] = args.platforms if args.seller_network_ids: BODY['sellerNetworkIds'] = args.seller_network_ids if args.time_series_granularity: BODY['timeSeriesGranularity'] = args.time_series_granularity try: service = samples_util.GetService('v2beta1') except IOError as ex: print(f'Unable to create adexchangebuyer service - {ex}') print('Did you specify the key file in samples_util.py?') sys.exit(1) main(service, _OWNER_NAME_TEMPLATE.format( bidders_resource_id=args.bidder_resource_id), BODY, args.is_transient)<|fim▁end|>
def platform_type(s):
<|file_name|>live_values.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Live value resolution. Live values are extracted from the known execution context. Requires activity analysis annotations. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import gast from tensorflow.contrib.py2tf.pyct import anno from tensorflow.contrib.py2tf.pyct import transformer from tensorflow.contrib.py2tf.pyct.static_analysis.annos import NodeAnno class LiveValueResolver(transformer.Base): """Annotates nodes with live values.""" def __init__(self, context, literals): super(LiveValueResolver, self).__init__(context) self.literals = literals def visit_ClassDef(self, node): self.generic_visit(node) anno.setanno(node, 'live_val', self.context.namespace[node.name]) return node def visit_Name(self, node): self.generic_visit(node)<|fim▁hole|> if isinstance(node.ctx, gast.Load): assert anno.hasanno(node, NodeAnno.IS_LOCAL), node symbol_is_local = anno.getanno(node, NodeAnno.IS_LOCAL) assert anno.hasanno(node, NodeAnno.IS_MODIFIED_SINCE_ENTRY), node symbol_is_modified = anno.getanno(node, NodeAnno.IS_MODIFIED_SINCE_ENTRY) assert anno.hasanno(node, NodeAnno.IS_PARAM), node symbol_is_param = anno.getanno(node, NodeAnno.IS_PARAM) if not symbol_is_local and not 
symbol_is_param: if node.id in self.literals: anno.setanno(node, 'live_val', self.literals[node.id]) elif node.id in self.context.namespace: obj = self.context.namespace[node.id] anno.setanno(node, 'live_val', obj) if hasattr(obj, '__name__'): # If the symbol value is for example a primitive, then it will not # have a name. anno.setanno(node, 'fqn', (obj.__name__,)) else: pass # TODO(mdan): Should we raise an error here? # Can encounter this when: # * a symbol truly lacks reference # * a symbol is new, like the new name of a function we just renamed. else: pass # TODO(mdan): Attempt to trace its value through the local chain. # TODO(mdan): Use type annotations as fallback. if not symbol_is_modified: if node.id in self.context.arg_values: obj = self.context.arg_values[node.id] anno.setanno(node, 'live_val', obj) anno.setanno(node, 'fqn', (obj.__class__.__name__,)) return node def visit_Attribute(self, node): self.generic_visit(node) if anno.hasanno(node.value, 'live_val'): assert anno.hasanno(node.value, 'fqn') parent_object = anno.getanno(node.value, 'live_val') if not hasattr(parent_object, node.attr): raise AttributeError('%s has no attribute %s' % (parent_object, node.attr)) anno.setanno(node, 'parent_type', type(parent_object)) anno.setanno(node, 'live_val', getattr(parent_object, node.attr)) anno.setanno(node, 'fqn', anno.getanno(node.value, 'fqn') + (node.attr,)) # TODO(mdan): Investigate the role built-in annotations can play here. elif anno.hasanno(node.value, 'type'): parent_type = anno.getanno(node.value, 'type') if hasattr(parent_type, node.attr): # This should hold for static members like methods. # This would not hold for dynamic members like function attributes. # For the dynamic case, we simply leave the node without an annotation, # and let downstream consumers figure out what to do. 
anno.setanno(node, 'parent_type', parent_type) anno.setanno(node, 'live_val', getattr(parent_type, node.attr)) anno.setanno(node, 'fqn', anno.getanno(node.value, 'type_fqn') + (node.attr,)) elif isinstance(node.value, gast.Name): stem_name = node.value # All nonlocal symbols should be fully resolved. assert anno.hasanno(stem_name, NodeAnno.IS_LOCAL), stem_name # TODO(mdan): Figure out what to do when calling attribute on local object # Maybe just leave as-is? return node def resolve(node, context, literals): return LiveValueResolver(context, literals).visit(node)<|fim▁end|>
<|file_name|>test_vector.cpp<|end_file_name|><|fim▁begin|>#include <gtest/gtest.h> #include <iostream> #include "geometry.hpp" #include "vector.hpp"<|fim▁hole|>TEST(Vector, OrthogonalBasis) { constexpr const int dim = 3; using fptype = float; const fptype eps = 1e-4; Array<fptype, dim> tests[] = { {{1.0, 0.0, 0.0}}, {{0.0, 1.0, 0.0}}, {{0.0, 0.0, 1.0}}, {{1.0, 1.0, 0.0}}, {{1.0, 0.0, 1.0}}, {{0.0, 1.0, 1.0}}, {{1.0, 1.0, 1.0}}, {{1.0, 1.0, 2.0}}, {{1.0, 1.0e3, 1.0e-3}}}; for(auto t : tests) { Geometry::Vector<dim, fptype> v(t); for(unsigned i = 0; i < dim - 1; i++) { auto o1 = v.getOrthogonal(i); fptype dp = o1.dot(v); EXPECT_NEAR(dp, 0.0, eps); for(unsigned j = i + 1; j < dim - 1; j++) { auto o2 = v.getOrthogonal(j); dp = o1.dot(o2); EXPECT_NEAR(dp, 0.0, eps); } } } }<|fim▁end|>
#include "array.hpp"
<|file_name|>network_dropdown.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. /** * @fileoverview Network drop-down implementation. */ cr.define('cr.ui', function() { /** * Creates a new container for the drop down menu items. * @constructor * @extends {HTMLDivElement} */ var DropDownContainer = cr.ui.define('div'); DropDownContainer.prototype = { __proto__: HTMLDivElement.prototype, /** @override */ decorate: function() { this.classList.add('dropdown-container'); // Selected item in the menu list. this.selectedItem = null; // First item which could be selected. this.firstItem = null; this.setAttribute('role', 'menu'); // Whether scroll has just happened. this.scrollJustHappened = false; }, /** * Gets scroll action to be done for the item. * @param {!Object} item Menu item. * @return {integer} -1 for scroll up; 0 for no action; 1 for scroll down. */ scrollAction: function(item) { var thisTop = this.scrollTop; var thisBottom = thisTop + this.offsetHeight; var itemTop = item.offsetTop; var itemBottom = itemTop + item.offsetHeight; if (itemTop <= thisTop) return -1; if (itemBottom >= thisBottom) return 1; return 0; }, /** * Selects new item. * @param {!Object} selectedItem Item to be selected. * @param {boolean} mouseOver Is mouseover event triggered? */ selectItem: function(selectedItem, mouseOver) { if (mouseOver && this.scrollJustHappened) { this.scrollJustHappened = false; return; } if (this.selectedItem) this.selectedItem.classList.remove('hover'); selectedItem.classList.add('hover'); this.selectedItem = selectedItem; if (!this.hidden) { this.previousSibling.setAttribute( 'aria-activedescendant', selectedItem.id); } var action = this.scrollAction(selectedItem); if (action != 0) { selectedItem.scrollIntoView(action < 0); this.scrollJustHappened = true; } } }; /** * Creates a new DropDown div. 
* @constructor * @extends {HTMLDivElement} */ var DropDown = cr.ui.define('div'); DropDown.ITEM_DIVIDER_ID = -2; DropDown.KEYCODE_DOWN = 40; DropDown.KEYCODE_ENTER = 13; DropDown.KEYCODE_ESC = 27; DropDown.KEYCODE_SPACE = 32; DropDown.KEYCODE_TAB = 9; DropDown.KEYCODE_UP = 38; DropDown.prototype = { __proto__: HTMLDivElement.prototype, /** @override */ decorate: function() { this.appendChild(this.createOverlay_()); this.appendChild(this.title_ = this.createTitle_()); var container = new DropDownContainer(); container.id = this.id + '-dropdown-container'; this.appendChild(container); this.addEventListener('keydown', this.keyDownHandler_); this.title_.id = this.id + '-dropdown'; this.title_.setAttribute('role', 'button'); this.title_.setAttribute('aria-haspopup', 'true'); this.title_.setAttribute('aria-owns', container.id); }, /** * Returns true if dropdown menu is shown. * @type {bool} Whether menu element is shown. */ get isShown() { return !this.container.hidden; }, /** * Sets dropdown menu visibility. * @param {bool} show New visibility state for dropdown menu. */ set isShown(show) { this.firstElementChild.hidden = !show; this.container.hidden = !show; if (show) { this.container.selectItem(this.container.firstItem, false); } else { this.title_.removeAttribute('aria-activedescendant'); } }, /** * Returns container of the menu items. */ get container() { return this.lastElementChild; }, /** * Sets title and icon. * @param {string} title Text on dropdown. * @param {string} icon Icon in dataURL format. */ setTitle: function(title, icon) { this.title_.firstElementChild.src = icon; this.title_.lastElementChild.textContent = title; }, /** * Sets dropdown items. * @param {Array} items Dropdown items array. */ setItems: function(items) { this.container.innerHTML = '';<|fim▁hole|> var item = items[i]; if ('sub' in item) { // Workaround for submenus, add items on top level. // TODO(altimofeev): support submenus. 
for (var j = 0; j < item.sub.length; ++j) this.createItem_(this.container, item.sub[j]); continue; } this.createItem_(this.container, item); } this.container.selectItem(this.container.firstItem, false); }, /** * Id of the active drop-down element. * @private */ activeElementId_: '', /** * Creates dropdown item element and adds into container. * @param {HTMLElement} container Container where item is added. * @param {!Object} item Item to be added. * @private */ createItem_: function(container, item) { var itemContentElement; var className = 'dropdown-item'; if (item.id == DropDown.ITEM_DIVIDER_ID) { className = 'dropdown-divider'; itemContentElement = this.ownerDocument.createElement('hr'); } else { var span = this.ownerDocument.createElement('span'); itemContentElement = span; span.textContent = item.label; if ('bold' in item && item.bold) span.classList.add('bold'); var image = this.ownerDocument.createElement('img'); image.alt = ''; image.classList.add('dropdown-image'); if (item.icon) image.src = item.icon; } var itemElement = this.ownerDocument.createElement('div'); itemElement.classList.add(className); itemElement.appendChild(itemContentElement); itemElement.iid = item.id; itemElement.controller = this; var enabled = 'enabled' in item && item.enabled; if (!enabled) itemElement.classList.add('disabled-item'); if (item.id > 0) { var wrapperDiv = this.ownerDocument.createElement('div'); wrapperDiv.setAttribute('role', 'menuitem'); wrapperDiv.id = this.id + item.id; if (!enabled) wrapperDiv.setAttribute('aria-disabled', 'true'); wrapperDiv.classList.add('dropdown-item-container'); var imageDiv = this.ownerDocument.createElement('div'); imageDiv.appendChild(image); wrapperDiv.appendChild(imageDiv); wrapperDiv.appendChild(itemElement); wrapperDiv.addEventListener('click', function f(e) { var item = this.lastElementChild; if (item.iid < -1 || item.classList.contains('disabled-item')) return; item.controller.isShown = false; if (item.iid >= 0) 
chrome.send('networkItemChosen', [item.iid]); this.parentNode.parentNode.title_.focus(); }); wrapperDiv.addEventListener('mouseover', function f(e) { this.parentNode.selectItem(this, true); }); itemElement = wrapperDiv; } container.appendChild(itemElement); if (!container.firstItem && item.id >= 0) { container.firstItem = itemElement; } }, /** * Creates dropdown overlay element, which catches outside clicks. * @type {HTMLElement} * @private */ createOverlay_: function() { var overlay = this.ownerDocument.createElement('div'); overlay.classList.add('dropdown-overlay'); overlay.addEventListener('click', function() { this.parentNode.title_.focus(); this.parentNode.isShown = false; }); return overlay; }, /** * Creates dropdown title element. * @type {HTMLElement} * @private */ createTitle_: function() { var image = this.ownerDocument.createElement('img'); image.alt = ''; image.classList.add('dropdown-image'); var text = this.ownerDocument.createElement('div'); var el = this.ownerDocument.createElement('div'); el.appendChild(image); el.appendChild(text); el.tabIndex = 0; el.classList.add('dropdown-title'); el.iid = -1; el.controller = this; el.inFocus = false; el.opening = false; el.addEventListener('click', function f(e) { this.controller.isShown = !this.controller.isShown; }); el.addEventListener('focus', function(e) { this.inFocus = true; }); el.addEventListener('blur', function(e) { this.inFocus = false; }); el.addEventListener('keydown', function f(e) { if (this.inFocus && !this.controller.isShown && (e.keyCode == DropDown.KEYCODE_ENTER || e.keyCode == DropDown.KEYCODE_SPACE || e.keyCode == DropDown.KEYCODE_UP || e.keyCode == DropDown.KEYCODE_DOWN)) { this.opening = true; this.controller.isShown = true; e.stopPropagation(); e.preventDefault(); } }); return el; }, /** * Handles keydown event from the keyboard. * @private * @param {!Event} e Keydown event. 
*/ keyDownHandler_: function(e) { if (!this.isShown) return; var selected = this.container.selectedItem; var handled = false; switch (e.keyCode) { case DropDown.KEYCODE_UP: { do { selected = selected.previousSibling; if (!selected) selected = this.container.lastElementChild; } while (selected.iid < 0); this.container.selectItem(selected, false); handled = true; break; } case DropDown.KEYCODE_DOWN: { do { selected = selected.nextSibling; if (!selected) selected = this.container.firstItem; } while (selected.iid < 0); this.container.selectItem(selected, false); handled = true; break; } case DropDown.KEYCODE_ESC: { this.isShown = false; handled = true; break; } case DropDown.KEYCODE_TAB: { this.isShown = false; handled = true; break; } case DropDown.KEYCODE_ENTER: { if (!this.title_.opening) { this.title_.focus(); this.isShown = false; var item = this.title_.controller.container.selectedItem.lastElementChild; if (item.iid >= 0 && !item.classList.contains('disabled-item')) chrome.send('networkItemChosen', [item.iid]); } handled = true; break; } } if (handled) { e.stopPropagation(); e.preventDefault(); } this.title_.opening = false; } }; /** * Updates networks list with the new data. * @param {!Object} data Networks list. */ DropDown.updateNetworks = function(data) { if (DropDown.activeElementId_) $(DropDown.activeElementId_).setItems(data); }; /** * Updates network title, which is shown by the drop-down. * @param {string} title Title to be displayed. * @param {!Object} icon Icon to be displayed. */ DropDown.updateNetworkTitle = function(title, icon) { if (DropDown.activeElementId_) $(DropDown.activeElementId_).setTitle(title, icon); }; /** * Activates network drop-down. Only one network drop-down * can be active at the same time. So activating new drop-down deactivates * the previous one. * @param {string} elementId Id of network drop-down element. * @param {boolean} isOobe Whether drop-down is used by an Oobe screen. 
* @param {integer} lastNetworkType Last active network type. Pass -1 if it * isn't known. */ DropDown.show = function(elementId, isOobe, lastNetworkType) { $(elementId).isShown = false; if (DropDown.activeElementId_ != elementId) { DropDown.activeElementId_ = elementId; chrome.send('networkDropdownShow', [elementId, isOobe, lastNetworkType]); } }; /** * Deactivates network drop-down. Deactivating inactive drop-down does * nothing. * @param {string} elementId Id of network drop-down element. */ DropDown.hide = function(elementId) { if (DropDown.activeElementId_ == elementId) { DropDown.activeElementId_ = ''; chrome.send('networkDropdownHide'); } }; /** * Refreshes network drop-down. Should be called on language change. */ DropDown.refresh = function() { chrome.send('networkDropdownRefresh'); }; return { DropDown: DropDown }; });<|fim▁end|>
this.container.firstItem = null; this.container.selectedItem = null; for (var i = 0; i < items.length; ++i) {
<|file_name|>fasta_name_filter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ FastaNameFilter Description: Filter a fasta file based on a string to find in the sequences headers, or given a file with a list of id fastaNameFilter.py -i input.fa -o output.fa -s "stringtofind" fastaNameFilter.py -i input.fa -o output.fa -f sequencesnames.ids ----------------------------------------------------------------------- Author: This software is written and maintained by Pierre Pericard ([email protected]) Created: 2014 Last Modified: 2016-01-13 Licence: GNU GPL 3.0 Copyright 2014-2016 Pierre Pericard This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
You should have received a copy of the GNU General Public License<|fim▁hole|> import sys import os import argparse import string import re def read_fasta_file_handle(fasta_file_handle): """ Parse a fasta file and return a generator """ # Variables initialization header = '' seqlines = list() sequence_nb = 0 # Reading input file for line in fasta_file_handle: if line[0] == '>': # Yield the last read header and sequence if sequence_nb: yield (header, ''.join(seqlines)) del seqlines[:] # Get header header = line[1:].rstrip() sequence_nb += 1 else: # Concatenate sequence seqlines.append(line.strip()) # Yield the input file last sequence yield (header, ''.join(seqlines)) # Close input file fasta_file_handle.close() def format_seq(seq, linereturn=80): """ Format an input sequence """ buff = list() for i in range(0, len(seq), linereturn): buff.append("{0}\n".format(seq[i:(i + linereturn)])) return ''.join(buff).rstrip() if __name__ == '__main__': parser = argparse.ArgumentParser(description='Filter a fasta file based on sequence name.') parser.add_argument('-i', '--input_fasta', metavar='input', type=argparse.FileType('r'), default='-', help='input fasta file') parser.add_argument('-o', '--output_fasta', metavar='output', type=argparse.FileType('w'), default='-', help='ouput fasta file') parser.add_argument('-s', '--stringtofind', metavar='string', type=str, help='String to filter on') parser.add_argument('-f', '--fileids', metavar='file', type=argparse.FileType('r'), help='File with ids') args = parser.parse_args() if not args.stringtofind and not args.fileids: parser.print_help() raise Exception('Either a string or an id file has to be supplied') if args.fileids: ids_list = list() # read ids and store them for line in args.fileids: ids_list.append(line.strip()) # convert the id list to a frozenset for fast search ids_set = frozenset(ids_list) # filter the fasta file for header, sequence in read_fasta_file_handle(args.input_fasta): seq_id = header.split()[0] if seq_id in 
ids_set: args.output_fasta.write(">{0}\n{1}\n".format(header, format_seq(sequence))) else: tofind = re.compile(args.stringtofind, flags=re.IGNORECASE) for header, sequence in read_fasta_file_handle(args.input_fasta): if tofind.search(header): args.output_fasta.write(">{0}\n{1}\n".format(header, format_seq(sequence)))<|fim▁end|>
along with this program. If not, see <http://www.gnu.org/licenses/>. """
<|file_name|>FEM.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import math import Sofa def tostr(L): return str(L).replace('[', '').replace("]", '').replace(",", ' ') def transform(T,p): return [T[0][0]*p[0]+T[0][1]*p[1]+T[0][2]*p[2]+T[1][0],T[0][3]*p[0]+T[0][4]*p[1]+T[0][5]*p[2]+T[1][1],T[0][6]*p[0]+T[0][7]*p[1]+T[0][8]*p[2]+T[1][2]] def transformF(T,F): return [T[0][0]*F[0]+T[0][1]*F[3]+T[0][2]*F[6],T[0][0]*F[1]+T[0][1]*F[4]+T[0][2]*F[7],T[0][0]*F[2]+T[0][1]*F[5]+T[0][2]*F[8],T[0][3]*F[0]+T[0][4]*F[3]+T[0][5]*F[6],T[0][3]*F[1]+T[0][4]*F[4]+T[0][5]*F[7],T[0][3]*F[2]+T[0][4]*F[5]+T[0][5]*F[8],T[0][6]*F[0]+T[0][7]*F[3]+T[0][8]*F[6],T[0][6]*F[1]+T[0][7]*F[4]+T[0][8]*F[7],T[0][6]*F[2]+T[0][7]*F[5]+T[0][8]*F[8]]<|fim▁hole|>def compare(p1,p2): res = 0 for i,P1 in enumerate(p1): for j,item in enumerate(P1): res = res+ (item-p2[i][j])*(item-p2[i][j]) return res ERRORTOL = 1e-5 T = [[2,0,0,0,2,0,0,0,2],[0,0,0]] #T = [[0.8,1.2,0.3,0,1.9,0.45,0.5,2.8,0.2],[5,2,8]] samples= [[0.5,0.5,0.5], [0.23,0.5,0.8], [0,0.12,0], [0.8,0,0.58]] # scene creation method def createScene(rootNode): rootNode.createObject('RequiredPlugin', pluginName="Flexible") rootNode.createObject('VisualStyle', displayFlags="showBehaviorModels") restpos = [[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0], [0, 0, 1], [1, 0, 1], [0, 1, 1], [1, 1, 1]] pos = [transform(T,item) for item in restpos] ########################################################### simNode = rootNode.createChild('Hexa_barycentric') simNode.createObject('MeshTopology', name="mesh", position=tostr(restpos), hexahedra="0 1 3 2 4 5 7 6") simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) ) simNode.createObject('BarycentricShapeFunction', position="@parent.rest_position", nbRef="8") childNode = simNode.createChild('childP') childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1") 
childNode.createObject('LinearMapping', template="Vec3d,Vec3d") childNode = simNode.createChild('childF') childNode.createObject('GaussPointContainer', position=tostr(samples)) childNode.createObject('MechanicalObject', template="F331", name="child") childNode.createObject('LinearMapping', template="Vec3d,F331", showDeformationGradientScale="1") childNode = simNode.createChild('Visu') childNode.createObject('VisualModel', color="8e-1 8e-1 1 1e-1") childNode.createObject('IdentityMapping') childNode = simNode.createChild('Visu2') childNode.createObject('VisualStyle', displayFlags="showWireframe") childNode.createObject('VisualModel', color="8e-1 8e-1 1 1") childNode.createObject('IdentityMapping') simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller") ########################################################### simNode = rootNode.createChild('Tetra_barycentric') simNode.createObject('MeshTopology', name="mesh", position=tostr(restpos), tetrahedra="0 5 1 7 0 1 2 7 1 2 7 3 7 2 0 6 7 6 0 5 6 5 4 0") simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) ) simNode.createObject('BarycentricShapeFunction', position="@parent.rest_position", nbRef="4") childNode = simNode.createChild('childP') childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1") childNode.createObject('LinearMapping', template="Vec3d,Vec3d") childNode = simNode.createChild('childF') childNode.createObject('GaussPointContainer', position=tostr(samples)) childNode.createObject('MechanicalObject', template="F331", name="child") childNode.createObject('LinearMapping', template="Vec3d,F331") simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller") ########################################################### simNode = rootNode.createChild('Hexa_shepard') simNode.createObject('MeshTopology', name="mesh", 
position=tostr(restpos), hexahedra="0 1 3 2 4 5 7 6") simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) ) simNode.createObject('ShepardShapeFunction', position="@parent.rest_position", power="2") childNode = simNode.createChild('childP') childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1") childNode.createObject('LinearMapping', template="Vec3d,Vec3d") childNode = simNode.createChild('childF') childNode.createObject('GaussPointContainer', position=tostr(samples)) childNode.createObject('MechanicalObject', template="F331", name="child") childNode.createObject('LinearMapping', template="Vec3d,F331") simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller") ########################################################### rootNode.animate=1 return rootNode class Controller(Sofa.PythonScriptController): def createGraph(self,node): self.node=node self.done=0 return 0 def onEndAnimationStep(self,dt): if self.done==0: print "TEST "+self.node.name+":" # test points restpos = self.node.getObject('childP/child').findData('rest_position').value refpos = [transform(T,item) for item in restpos] pos = self.node.getObject('childP/child').findData('position').value error = compare(refpos,pos) if error>ERRORTOL : print "\t"+"\033[91m"+"[FAILED]"+"\033[0m"+" error on P= "+str(error) else : print "\t"+"\033[92m"+"[OK]"+"\033[0m"+" error on P= "+str(error) # test defo gradients restpos = [1,0,0,0,1,0,0,0,1] pos = self.node.getObject('childF/child').findData('position').value refpos = [transformF(T,restpos) for item in pos] error = compare(refpos,pos) if error>ERRORTOL : print "\t"+"\033[91m"+"[FAILED]"+"\033[0m"+" error on F= "+str(error) else : print "\t"+"\033[92m"+"[OK]"+"\033[0m"+" error on F= "+str(error) self.done=1 return 0<|fim▁end|>
<|file_name|>browser.js<|end_file_name|><|fim▁begin|>export const browserVersions = () => { let u = navigator.userAgent return { // 移动终端浏览器版本信息 trident: u.indexOf('Trident') > -1, // IE内核 presto: u.indexOf('Presto') > -1, // opera内核 webKit: u.indexOf('AppleWebKit') > -1, // 苹果、谷歌内核<|fim▁hole|> gecko: u.indexOf('Gecko') > -1 && u.indexOf('KHTML') === -1, // 火狐内核 mobile: !!u.match(/AppleWebKit.*Mobile.*/), // 是否为移动终端 ios: !!u.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/), // ios终端 android: u.indexOf('Android') > -1 || u.indexOf('Linux') > -1, // android终端或者uc浏览器 iPhone: u.indexOf('iPhone') > -1, // 是否为iPhone或者QQHD浏览器 iPad: u.indexOf('iPad') > -1, // 是否iPad webApp: u.indexOf('Safari') === -1 // 是否web应该程序,没有头部与底部 } }<|fim▁end|>
<|file_name|>analytics.js<|end_file_name|><|fim▁begin|>// GoSquared var GoSquared = {}; GoSquared.acct = "GSN-064561-T"; (function(w){ function gs(){ w._gstc_lt = +new Date; var d = document, g = d.createElement("script"); g.type = "text/javascript"; g.src = "//d1l6p2sc9645hc.cloudfront.net/tracker.js"; var s = d.getElementsByTagName("script")[0]; s.parentNode.insertBefore(g, s); } w.addEventListener ? w.addEventListener("load", gs, false) : w.attachEvent("onload", gs);<|fim▁hole|> // Google Analytics var _gaq = _gaq || []; _gaq.push(['_setAccount', 'UA-40084408-1']); _gaq.push(['_trackPageview']); (function() { var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true; ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js'; var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s); })();<|fim▁end|>
})(window);
<|file_name|>AjaxTargetUpdateTestInner.java<|end_file_name|><|fim▁begin|>package forscher.nocket.page.gen.ajax; import gengui.annotations.Eager; import java.io.Serializable; public class AjaxTargetUpdateTestInner implements Serializable { private String feld1; private String feld2; <|fim▁hole|> @Eager public void setEagerFeld1(String feld1) { this.feld1 = feld1; } public String getFeld2() { return feld2; } public void setFeld2(String feld2) { this.feld2 = feld2; } }<|fim▁end|>
public String getEagerFeld1() { return feld1; }
<|file_name|>eightball.py<|end_file_name|><|fim▁begin|># Copyright the Karmabot authors and contributors. # All rights reserved. See AUTHORS. # # This file is part of 'karmabot' and is distributed under the BSD license. # See LICENSE for more details. from karmabot.core.facets import Facet from karmabot.core.commands import CommandSet, thing import random predictions = [ "As I see it, yes", "It is certain", "It is decidedly so", "Most likely", "Outlook good", "Signs point to yes", "Without a doubt", "Yes", "Yes - definitely", "You may rely on it", "Reply hazy, try again", "Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again", "Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"] @thing.facet_classes.register class EightBallFacet(Facet): name = "eightball" commands = thing.add_child(CommandSet(name)) @classmethod def does_attach(cls, thing):<|fim▁hole|> context.reply(random.choice(predictions) + ".")<|fim▁end|>
return thing.name == "eightball" @commands.add("shake {thing}", help="shake the magic eightball") def shake(self, thing, context):
<|file_name|>assignment-returned.js<|end_file_name|><|fim▁begin|>import { h } from 'omi'; import createSvgIcon from './utils/createSvgIcon'; export default createSvgIcon(h("path", { d: "M19 3h-4.18C14.4 1.84 13.3 1 12 1c-1.3 0-2.4.84-2.82 2H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zm-7 0c.55 0 1 .45 1 1s-.45 1-1 1-1-.45-1-1 .45-1 1-1zm0 15l-5-5h3V9h4v4h3l-5 5z"<|fim▁hole|><|fim▁end|>
}), 'AssignmentReturned');
<|file_name|>frontend.py<|end_file_name|><|fim▁begin|>############################################################################### # # The MIT License (MIT) # # Copyright (c) Tavendo GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software.<|fim▁hole|># IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### from os import environ from twisted.internet import reactor from twisted.internet.defer import inlineCallbacks from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner class Component(ApplicationSession): """ An application component calling the different backend procedures. 
""" @inlineCallbacks def onJoin(self, details): print("session attached") yield self.call(u'com.arguments.ping') print("Pinged!") res = yield self.call(u'com.arguments.add2', 2, 3) print("Add2: {}".format(res)) starred = yield self.call(u'com.arguments.stars') print("Starred 1: {}".format(starred)) starred = yield self.call(u'com.arguments.stars', nick=u'Homer') print("Starred 2: {}".format(starred)) starred = yield self.call(u'com.arguments.stars', stars=5) print("Starred 3: {}".format(starred)) starred = yield self.call(u'com.arguments.stars', nick=u'Homer', stars=5) print("Starred 4: {}".format(starred)) orders = yield self.call(u'com.arguments.orders', u'coffee') print("Orders 1: {}".format(orders)) orders = yield self.call(u'com.arguments.orders', u'coffee', limit=10) print("Orders 2: {}".format(orders)) arglengths = yield self.call(u'com.arguments.arglen') print("Arglen 1: {}".format(arglengths)) arglengths = yield self.call(u'com.arguments.arglen', 1, 2, 3) print("Arglen 2: {}".format(arglengths)) arglengths = yield self.call(u'com.arguments.arglen', a=1, b=2, c=3) print("Arglen 3: {}".format(arglengths)) arglengths = yield self.call(u'com.arguments.arglen', 1, 2, 3, a=1, b=2, c=3) print("Arglen 4: {}".format(arglengths)) self.leave() def onDisconnect(self): print("disconnected") reactor.stop() if __name__ == '__main__': runner = ApplicationRunner( environ.get("AUTOBAHN_DEMO_ROUTER", u"ws://127.0.0.1:8080/ws"), u"crossbardemo", debug=False, # optional; log even more details ) runner.run(Component)<|fim▁end|>
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
<|file_name|>btpl_value_var_reference252_all_of.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ Onshape REST API The Onshape REST API consumed by all clients. # noqa: E501 The version of the OpenAPI document: 1.111 Contact: [email protected] Generated by: https://openapi-generator.tech """ from __future__ import absolute_import import sys # noqa: F401 import six # noqa: F401 from onshape_client.oas.model_utils import ModelNormal # noqa: F401 try: from onshape_client.oas.models import btp_identifier8 except ImportError: btp_identifier8 = sys.modules["onshape_client.oas.models.btp_identifier8"] class BTPLValueVarReference252AllOf(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. """ allowed_values = {} validations = {} additional_properties_type = None @staticmethod def openapi_types(): """ This must be a class method so a model may have properties that are of type self, this ensures that we don't create a cyclic import Returns openapi_types (dict): The key is attribute name and the value is attribute type. 
""" return { "name": (btp_identifier8.BTPIdentifier8,), # noqa: E501 } @staticmethod def discriminator(): return None attribute_map = { "name": "name", # noqa: E501 } @staticmethod def _composed_schemas(): return None<|fim▁hole|> required_properties = set( [ "_data_store", "_check_type", "_from_server", "_path_to_item", "_configuration", ] ) def __init__( self, _check_type=True, _from_server=False, _path_to_item=(), _configuration=None, **kwargs ): # noqa: E501 """btpl_value_var_reference252_all_of.BTPLValueVarReference252AllOf - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _from_server (bool): True if the data is from the server False if the data is from the client (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. name (btp_identifier8.BTPIdentifier8): [optional] # noqa: E501 """ self._data_store = {} self._check_type = _check_type self._from_server = _from_server self._path_to_item = _path_to_item self._configuration = _configuration for var_name, var_value in six.iteritems(kwargs): setattr(self, var_name, var_value)<|fim▁end|>
<|file_name|>closeDialog.js<|end_file_name|><|fim▁begin|>// -*- mode: js; js-indent-level: 4; indent-tabs-mode: nil -*- const Clutter = imports.gi.Clutter; const Gio = imports.gi.Gio; const GLib = imports.gi.GLib; const GObject = imports.gi.GObject; const Lang = imports.lang; const Meta = imports.gi.Meta; const Shell = imports.gi.Shell; const Dialog = imports.ui.dialog; const Main = imports.ui.main; const Tweener = imports.ui.tweener; var FROZEN_WINDOW_BRIGHTNESS = -0.3 var DIALOG_TRANSITION_TIME = 0.15 var ALIVE_TIMEOUT = 5000; var CloseDialog = new Lang.Class({ Name: 'CloseDialog', Extends: GObject.Object, Implements: [ Meta.CloseDialog ], Properties: { 'window': GObject.ParamSpec.override('window', Meta.CloseDialog) }, _init(window) { this.parent(); this._window = window; this._dialog = null; this._timeoutId = 0; }, get window() { return this._window; }, set window(window) { this._window = window; }, _createDialogContent() { let tracker = Shell.WindowTracker.get_default(); let windowApp = tracker.get_window_app(this._window); /* Translators: %s is an application name */ let title = _("“%s” is not responding.").format(windowApp.get_name()); let subtitle = _("You may choose to wait a short while for it to " + "continue or force the application to quit entirely."); let icon = new Gio.ThemedIcon({ name: 'dialog-warning-symbolic' }); return new Dialog.MessageDialogContent({ icon, title, subtitle }); }, _initDialog() { if (this._dialog) return; let windowActor = this._window.get_compositor_private(); this._dialog = new Dialog.Dialog(windowActor, 'close-dialog'); this._dialog.width = windowActor.width; this._dialog.height = windowActor.height; this._dialog.addContent(this._createDialogContent()); this._dialog.addButton({ label: _('Force Quit'), action: this._onClose.bind(this), default: true }); this._dialog.addButton({ label: _('Wait'), action: this._onWait.bind(this), key: Clutter.Escape }); global.focus_manager.add_group(this._dialog); }, _addWindowEffect() { // We 
set the effect on the surface actor, so the dialog itself // (which is a child of the MetaWindowActor) does not get the // effect applied itself. let windowActor = this._window.get_compositor_private(); let surfaceActor = windowActor.get_first_child(); let effect = new Clutter.BrightnessContrastEffect(); effect.set_brightness(FROZEN_WINDOW_BRIGHTNESS); surfaceActor.add_effect_with_name("gnome-shell-frozen-window", effect); }, _removeWindowEffect() { let windowActor = this._window.get_compositor_private(); let surfaceActor = windowActor.get_first_child(); surfaceActor.remove_effect_by_name("gnome-shell-frozen-window"); }, _onWait() { this.response(Meta.CloseDialogResponse.WAIT); }, _onClose() { this.response(Meta.CloseDialogResponse.FORCE_CLOSE); }, vfunc_show() { if (this._dialog != null) return; Meta.disable_unredirect_for_display(global.display); this._timeoutId = GLib.timeout_add(GLib.PRIORITY_DEFAULT, ALIVE_TIMEOUT, () => { this._window.check_alive(global.display.get_current_time_roundtrip()); return GLib.SOURCE_CONTINUE; }); this._addWindowEffect(); this._initDialog(); this._dialog.scale_y = 0; this._dialog.set_pivot_point(0.5, 0.5); Tweener.addTween(this._dialog, { scale_y: 1, transition: 'linear', time: DIALOG_TRANSITION_TIME, onComplete: () => { Main.layoutManager.trackChrome(this._dialog, { affectsInputRegion: true }); } }); }, vfunc_hide() { if (this._dialog == null) return; Meta.enable_unredirect_for_display(global.display); GLib.source_remove(this._timeoutId); this._timeoutId = 0; let dialog = this._dialog; this._dialog = null; this._removeWindowEffect(); Tweener.addTween(dialog, { scale_y: 0, transition: 'linear', time: DIALOG_TRANSITION_TIME, onComplete: () => { dialog.destroy(); } }); }, vfunc_focus() { if (this._dialog)<|fim▁hole|><|fim▁end|>
this._dialog.grab_key_focus(); } });
<|file_name|>refcounts.py<|end_file_name|><|fim▁begin|>"""Support functions for loading the reference count data file.""" __version__ = '$Revision: 1.2 $' import os import string import sys # Determine the expected location of the reference count file: try: p = os.path.dirname(__file__) except NameError: p = sys.path[0] p = os.path.normpath(os.path.join(os.getcwd(), p, os.pardir, "api", "refcounts.dat")) DEFAULT_PATH = p del p def load(path=DEFAULT_PATH):<|fim▁hole|> d = {} while 1: line = fp.readline() if not line: break line = string.strip(line) if line[:1] in ("", "#"): # blank lines and comments continue parts = string.split(line, ":", 4) function, type, arg, refcount, comment = parts if refcount == "null": refcount = None elif refcount: refcount = int(refcount) else: refcount = None # # Get the entry, creating it if needed: # try: entry = d[function] except KeyError: entry = d[function] = Entry(function) # # Update the entry with the new parameter or the result information. # if arg: entry.args.append((arg, type, refcount)) else: entry.result_type = type entry.result_refs = refcount return d class Entry: def __init__(self, name): self.name = name self.args = [] self.result_type = '' self.result_refs = None def dump(d): """Dump the data in the 'canonical' format, with functions in sorted order.""" items = d.items() items.sort() first = 1 for k, entry in items: if first: first = 0 else: print s = entry.name + ":%s:%s:%s:" if entry.result_refs is None: r = "" else: r = entry.result_refs print s % (entry.result_type, "", r) for t, n, r in entry.args: if r is None: r = "" print s % (t, n, r) def main(): d = load() dump(d) if __name__ == "__main__": main()<|fim▁end|>
return loadfile(open(path)) def loadfile(fp):
<|file_name|>lint-dead-code-5.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(unused_variables)] #![deny(dead_code)] enum Enum1 { Variant1(isize), Variant2 //~ ERROR: variant is never used } enum Enum2 { Variant3(bool), #[allow(dead_code)] Variant4(isize), Variant5 { _x: isize }, //~ ERROR: variant is never used: `Variant5` Variant6(isize), //~ ERROR: variant is never used: `Variant6` _Variant7, } enum Enum3 { //~ ERROR: enum is never used Variant8, Variant9 } fn main() { let v = Enum1::Variant1(1); match v { Enum1::Variant1(_) => (), Enum1::Variant2 => () } let x = Enum2::Variant3(true); }<|fim▁end|>
// file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //
<|file_name|>calculate-score.js<|end_file_name|><|fim▁begin|>export default function calculateScore (subject, chosenId, time) {<|fim▁hole|> const isCorrectAnswer = subject.id === chosenId let score if(isCorrectAnswer) { if(time < 7) { score = 3 } else { // Needs review score = .9 + (2 * (1/subject.seenCount)) } } else { // Degrees of failure score = 1/subject.seenCount } subject.score = score return [subject, score] }<|fim▁end|>
<|file_name|>charmconfigwatcher_test.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package cache import ( "github.com/juju/errors" "github.com/juju/juju/core/settings" "github.com/juju/names/v4" jc "github.com/juju/testing/checkers" "github.com/prometheus/client_golang/prometheus/testutil" gc "gopkg.in/check.v1" ) const ( branchName = "test-branch" defaultPassword = "default-pass" defaultCharmURL = "default-charm-url" defaultUnitName = "redis/0" ) type charmConfigWatcherSuite struct { EntitySuite } var _ = gc.Suite(&charmConfigWatcherSuite{}) func (s *charmConfigWatcherSuite) TestTrackingBranchChangedNotified(c *gc.C) { // After initializing we expect not miss c.Check(testutil.ToFloat64(s.Gauges.CharmConfigHashCacheMiss), gc.Equals, float64(0)) w := s.newWatcher(c, defaultUnitName, defaultCharmURL) // After initializing the first watcher we expect one change and one miss c.Check(testutil.ToFloat64(s.Gauges.CharmConfigHashCacheMiss), gc.Equals, float64(1)) s.assertOneChange(c, w, map[string]interface{}{"password": defaultPassword}, defaultCharmURL) // Publish a tracked branch change with altered config. b := Branch{ details: BranchChange{ Name: branchName, Config: map[string]settings.ItemChanges{"redis": {settings.MakeAddition("password", "new-pass")}}, }, } // publish the second change. s.Hub.Publish(branchChange, b) s.assertOneChange(c, w, map[string]interface{}{"password": "new-pass"}, defaultCharmURL) // After the branchChange we expect another change and hence inc again. c.Check(testutil.ToFloat64(s.Gauges.CharmConfigHashCacheMiss), gc.Equals, float64(2)) c.Check(testutil.ToFloat64(s.Gauges.CharmConfigHashCacheHit), gc.Equals, float64(0)) w.AssertStops() } func (s *charmConfigWatcherSuite) TestNotTrackingBranchChangedNotNotified(c *gc.C) { // This will initialise the watcher without branch info. 
w := s.newWatcher(c, "redis/9", defaultCharmURL) s.assertOneChange(c, w, map[string]interface{}{}, defaultCharmURL) // Publish a branch change with altered config. b := Branch{ details: BranchChange{ Name: branchName, Config: map[string]settings.ItemChanges{"redis": {settings.MakeAddition("password", "new-pass")}}, }, } s.Hub.Publish(branchChange, b) // Nothing should change. w.AssertNoChange() w.AssertStops() } func (s *charmConfigWatcherSuite) TestDifferentBranchChangedNotNotified(c *gc.C) { w := s.newWatcher(c, defaultUnitName, defaultCharmURL) s.assertOneChange(c, w, map[string]interface{}{"password": defaultPassword}, defaultCharmURL) // Publish a branch change with a different name to the tracked one. b := Branch{ details: BranchChange{ Name: "some-other-branch", Config: map[string]settings.ItemChanges{"redis": {settings.MakeAddition("password", "new-pass")}}, }, } s.Hub.Publish(branchChange, b) w.AssertNoChange() w.AssertStops() } func (s *charmConfigWatcherSuite) TestTrackingBranchMasterChangedNotified(c *gc.C) { w := s.newWatcher(c, defaultUnitName, defaultCharmURL) s.assertOneChange(c, w, map[string]interface{}{"password": defaultPassword}, defaultCharmURL) // Publish a change to master configuration. hc, _ := newHashCache(map[string]interface{}{"databases": 4}, nil, nil) s.Hub.Publish(applicationConfigChange, hc) s.assertOneChange(c, w, map[string]interface{}{"password": defaultPassword, "databases": 4}, defaultCharmURL) w.AssertStops() } func (s *charmConfigWatcherSuite) TestTrackingBranchCommittedNotNotified(c *gc.C) { w := s.newWatcher(c, "redis/0", defaultCharmURL) s.assertOneChange(c, w, map[string]interface{}{"password": defaultPassword}, defaultCharmURL) // Publish a branch removal. s.Hub.Publish(modelBranchRemove, branchName) w.AssertNoChange() w.AssertStops() } func (s *charmConfigWatcherSuite) TestNotTrackedBranchSeesMasterConfig(c *gc.C) { // Watcher is for a unit not tracking the branch. 
w := s.newWatcher(c, "redis/9", defaultCharmURL) s.assertOneChange(c, w, map[string]interface{}{}, defaultCharmURL) w.AssertStops() } func (s *charmConfigWatcherSuite) TestSameUnitDifferentCharmURLYieldsDifferentHash(c *gc.C) { w := s.newWatcher(c, defaultUnitName, defaultCharmURL) s.assertOneChange(c, w, map[string]interface{}{"password": defaultPassword}, defaultCharmURL) h1 := w.Watcher.(*CharmConfigWatcher).configHash w.AssertStops() w = s.newWatcher(c, defaultUnitName, "different-charm-url") s.assertOneChange(c, w, map[string]interface{}{"password": defaultPassword}, "different-charm-url") h2 := w.Watcher.(*CharmConfigWatcher).configHash w.AssertStops() c.Check(h1, gc.Not(gc.Equals), h2) } func (s *charmConfigWatcherSuite) newWatcher(c *gc.C, unitName string, charmURL string) StringsWatcherC { appName, err := names.UnitApplication(unitName) c.Assert(err, jc.ErrorIsNil) // The topics can be arbitrary here; // these tests are isolated from actual cache behaviour. cfg := charmConfigWatcherConfig{ model: s.newStubModel(), unitName: unitName, appName: appName, charmURL: charmURL, appConfigChangeTopic: applicationConfigChange, branchChangeTopic: branchChange, branchRemoveTopic: modelBranchRemove, hub: s.Hub, res: s.NewResident(), } w, err := newCharmConfigWatcher(cfg) c.Assert(err, jc.ErrorIsNil) // Wrap the watcher and ensure we get the default notification. wc := NewStringsWatcherC(c, w) return wc } // newStub model sets up a cached model containing a redis application // and a branch with 2 redis units tracking it. 
func (s *charmConfigWatcherSuite) newStubModel() *stubCharmConfigModel { app := newApplication(nil, s.Gauges, s.Hub, s.NewResident()) app.setDetails(ApplicationChange{ Name: "redis", Config: map[string]interface{}{}}, ) branch := newBranch(s.Gauges, s.Hub, s.NewResident()) branch.setDetails(BranchChange{ Name: branchName, AssignedUnits: map[string][]string{"redis": {"redis/0", "redis/1"}}, Config: map[string]settings.ItemChanges{"redis": {settings.MakeAddition("password", defaultPassword)}}, }) return &stubCharmConfigModel{ app: *app, branches: map[string]Branch{"0": *branch}, metrics: s.Gauges, } } <|fim▁hole|>) { h, err := hashSettings(cfg, extra...) c.Assert(err, jc.ErrorIsNil) wc.AssertOneChange([]string{h}) } type stubCharmConfigModel struct { app Application branches map[string]Branch metrics *ControllerGauges } func (m *stubCharmConfigModel) Application(name string) (Application, error) { if name == m.app.details.Name { return m.app, nil } return Application{}, errors.NotFoundf("application %q", name) } func (m *stubCharmConfigModel) Branches() []Branch { branches := make([]Branch, len(m.branches)) i := 0 for _, b := range m.branches { branches[i] = b.copy() i += 1 } return branches } func (m *stubCharmConfigModel) Metrics() *ControllerGauges { return m.metrics }<|fim▁end|>
// assertWatcherConfig unwraps the charm config watcher and ensures that its // configuration hash matches that of the input configuration map. func (s *charmConfigWatcherSuite) assertOneChange( c *gc.C, wc StringsWatcherC, cfg map[string]interface{}, extra ...string,
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Google LLC // // Use of this source code is governed by an MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. //! Cryptography in Rust.<|fim▁hole|>//! Mundane is a Rust cryptography library backed by BoringSSL that is difficult //! to misuse, ergonomic, and performant (in that order). //! //! # Features //! //! By default, Mundane provides only high-level cryptographic primitives. //! Unless you are implementing cryptographic protocols, these high-level //! primitives should be all you need. However, if you are sure that you need //! something lower level, Mundane provides features to enable a number of //! different low level primitives. //! //! WARNING: Being low level, these primitives provide the programmer with more //! degrees of freedom. There are more conditions that the programmer must meet //! in order to guarantee security, and thus more ways for the programmer to //! shoot themself in the foot. Please only use these primitives if you're aware //! of the risks and are comfortable with the responsibility of using them //! correctly! //! //! **Features** //! //! | Name | Description | //! | -------------- | ------------------------------------| //! | `kdf` | Key derivation functions | //! | `bytes` | Low-level operations on byte slices | //! | `rsa-pkcs1v15` | RSA-PKCS1v1.5 signatures | //! //! # Insecure Operations //! //! Mundane supports one additional feature not listed in the previous section: //! `insecure`. This enables some cryptographic primitives which are today //! considered insecure. These should only be used for compatibility with legacy //! systems - never in new systems! When the `insecure` feature is used, an //! `insecure` module is added to the crate root. All insecure primitives are //! exposed through this module. 
#![deny(missing_docs)] #![deny(warnings)] // just in case we forget to add #[forbid(unsafe_code)] on new module // definitions #![deny(unsafe_code)] #[macro_use] mod macros; // Forbid unsafe code except in the boringssl module. #[allow(unsafe_code)] mod boringssl; #[cfg(any(doc, feature = "bytes"))] #[forbid(unsafe_code)] pub mod bytes; #[forbid(unsafe_code)] pub mod hash; #[forbid(unsafe_code)] pub mod hmac; #[cfg(any(doc, feature = "insecure"))] #[forbid(unsafe_code)] pub mod insecure; #[cfg(any(doc, feature = "kdf"))] #[forbid(unsafe_code)] pub mod kdf; #[forbid(unsafe_code)] pub mod password; #[forbid(unsafe_code)] pub mod public; #[forbid(unsafe_code)] mod util; use std::fmt::{self, Debug, Display, Formatter}; use boringssl::BoringError; /// Errors generated by this crate. /// /// `Error` represents two types of errors: errors generated by BoringSSL, and /// errors generated by the Rust code in this crate. When printed (using either /// `Display` or `Debug`), BoringSSL errors are of the form `boringssl: /// <error>`, while errors generated by Rust code are of the form `<error>`. pub struct Error(ErrorInner); impl Error { fn new(s: String) -> Error { Error(ErrorInner::Mundane(s)) } } #[doc(hidden)] impl From<BoringError> for Error { fn from(err: BoringError) -> Error { Error(ErrorInner::Boring(err)) } } impl Display for Error { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { match &self.0 { ErrorInner::Mundane(err) => write!(f, "{}", err), ErrorInner::Boring(err) => write!(f, "boringssl: {}", err), } } } impl Debug for Error { fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { match &self.0 { ErrorInner::Mundane(err) => write!(f, "{}", err), ErrorInner::Boring(err) => { if err.stack_depth() == 1 { // Either there was no stack trace, or the stack trace only // contained a single frame. In either case, don't bother // printing a preceding newline. 
write!(f, "boringssl: {:?}", err) } else { // There's a multi-line stack trace, so print a preceding // newline. write!(f, "boringssl:\n{:?}", err) } } } } } impl std::error::Error for Error {} enum ErrorInner { Mundane(String), Boring(BoringError), } #[cfg(test)] mod tests { use super::Error; #[test] fn test_send() { fn assert_send<T: Send>() {} assert_send::<Error>(); } #[test] fn test_sync() { fn assert_sync<T: Sync>() {} assert_sync::<Error>(); } }<|fim▁end|>
//!
<|file_name|>gossip.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT. // source: micro/go-plugins/registry/gossip/proto/gossip.proto package gossip import ( fmt "fmt" proto "github.com/golang/protobuf/proto" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // Update is the message broadcast type Update struct { // time to live for entry Expires uint64 `protobuf:"varint,1,opt,name=expires,proto3" json:"expires,omitempty"` // type of update Type int32 `protobuf:"varint,2,opt,name=type,proto3" json:"type,omitempty"` // what action is taken Action int32 `protobuf:"varint,3,opt,name=action,proto3" json:"action,omitempty"` // any other associated metadata about the data Metadata map[string]string `protobuf:"bytes,6,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // the payload data; Data []byte `protobuf:"bytes,7,opt,name=data,proto3" json:"data,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *Update) Reset() { *m = Update{} } func (m *Update) String() string { return proto.CompactTextString(m) } func (*Update) ProtoMessage() {} func (*Update) Descriptor() ([]byte, []int) { return fileDescriptor_e81db501087fb3b4, []int{0} } func (m *Update) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Update.Unmarshal(m, b) } func (m *Update) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_Update.Marshal(b, m, deterministic) } func 
(m *Update) XXX_Merge(src proto.Message) { xxx_messageInfo_Update.Merge(m, src) } func (m *Update) XXX_Size() int { return xxx_messageInfo_Update.Size(m) } func (m *Update) XXX_DiscardUnknown() { xxx_messageInfo_Update.DiscardUnknown(m) } var xxx_messageInfo_Update proto.InternalMessageInfo func (m *Update) GetExpires() uint64 { if m != nil { return m.Expires } return 0 } func (m *Update) GetType() int32 { if m != nil { return m.Type } return 0 } func (m *Update) GetAction() int32 { if m != nil { return m.Action } return 0 } func (m *Update) GetMetadata() map[string]string { if m != nil { return m.Metadata } return nil<|fim▁hole|> if m != nil { return m.Data } return nil } func init() { proto.RegisterType((*Update)(nil), "gossip.Update") proto.RegisterMapType((map[string]string)(nil), "gossip.Update.MetadataEntry") } func init() { proto.RegisterFile("micro/go-plugins/registry/gossip/proto/gossip.proto", fileDescriptor_e81db501087fb3b4) } var fileDescriptor_e81db501087fb3b4 = []byte{ // 223 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x8f, 0x41, 0x4b, 0x03, 0x31, 0x10, 0x85, 0x49, 0xb7, 0x4d, 0xed, 0xa8, 0x20, 0x83, 0x48, 0x10, 0x0f, 0x8b, 0xa7, 0xbd, 0xb8, 0x01, 0x7b, 0x29, 0x7a, 0xf6, 0xe8, 0x25, 0xe0, 0x0f, 0x88, 0x6d, 0x08, 0xc1, 0x76, 0x13, 0x92, 0xa9, 0x98, 0x9f, 0xea, 0xbf, 0x91, 0x26, 0x51, 0xf0, 0xf6, 0xbe, 0x99, 0x37, 0xbc, 0x37, 0xb0, 0x3e, 0xb8, 0x6d, 0xf4, 0xd2, 0xfa, 0x87, 0xb0, 0x3f, 0x5a, 0x37, 0x25, 0x19, 0x8d, 0x75, 0x89, 0x62, 0x96, 0xd6, 0xa7, 0xe4, 0x82, 0x0c, 0xd1, 0x93, 0x6f, 0x30, 0x16, 0x40, 0x5e, 0xe9, 0xfe, 0x9b, 0x01, 0x7f, 0x0b, 0x3b, 0x4d, 0x06, 0x05, 0x2c, 0xcd, 0x57, 0x70, 0xd1, 0x24, 0xc1, 0x7a, 0x36, 0xcc, 0xd5, 0x2f, 0x22, 0xc2, 0x9c, 0x72, 0x30, 0x62, 0xd6, 0xb3, 0x61, 0xa1, 0x8a, 0xc6, 0x1b, 0xe0, 0x7a, 0x4b, 0xce, 0x4f, 0xa2, 0x2b, 0xd3, 0x46, 0xb8, 0x81, 0xb3, 0x83, 0x21, 0xbd, 0xd3, 0xa4, 0x05, 0xef, 0xbb, 0xe1, 0xfc, 0xf1, 0x6e, 0x6c, 0xc9, 0x35, 0x67, 0x7c, 0x6d, 
0xeb, 0x97, 0x89, 0x62, 0x56, 0x7f, 0xee, 0x53, 0x4a, 0xb9, 0x5a, 0xf6, 0x6c, 0xb8, 0x50, 0x45, 0xdf, 0x3e, 0xc3, 0xe5, 0x3f, 0x3b, 0x5e, 0x41, 0xf7, 0x61, 0x72, 0x29, 0xb8, 0x52, 0x27, 0x89, 0xd7, 0xb0, 0xf8, 0xd4, 0xfb, 0x63, 0x6d, 0xb7, 0x52, 0x15, 0x9e, 0x66, 0x1b, 0xf6, 0xce, 0xcb, 0xab, 0xeb, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x8c, 0xfb, 0xd3, 0xd6, 0x21, 0x01, 0x00, 0x00, }<|fim▁end|>
} func (m *Update) GetData() []byte {
<|file_name|>Filter.ts<|end_file_name|><|fim▁begin|>module App { export class Filter extends LionSoftAngular.Filter { <|fim▁hole|> protected static addFactoryInjections(injects: string[]) { LionSoftAngular.Filter.addFactoryInjections(injects); this.addInjection(injects, "$filter"); } Translate(langKey: string): string { return this.$filter("translate")(langKey); } } /** * Наследуйте фильтры, которые обрабатываеют перечисления от этого класса. * * В классе наследнике достаточно только указать массив значений перечисления и их ключи локализации в качестве описания. * * В разметке фильтр используется преобразования значения перечисления в его локализованное описание: * <span>{{enumValue | myEnumFilter}}</span> * * а также для получения списка элементов перечисления: * * <ul ng-repeat='enumValue in [] | myEnumFilter'> * <li>{{enumValue | myEnumFilter}}</li> * </ul> * */ export class EnumFilter extends Filter { Source: IKeyValue[]; public Execute(value: number | number[], ...params): any | any[] { if (value === undefined) return ""; if (angular.isArray(value)) { return this.Source.select(k => k.Key).toArray(); } else { return this.Source.where(k => k.Key == <any>value).select(k => this.Translate(k.Value)).firstOrDefault(); } } } }<|fim▁end|>
$filter: ng.IFilterService;
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin<|fim▁hole|>admin.site.register(Organization)<|fim▁end|>
from .models import Organization
<|file_name|>test_grid.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright (C) 2014 Equinor ASA, Norway. # # The file 'test_grid.py' is part of ERT - Ensemble based Reservoir Tool. # # ERT is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ERT is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. # # See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html> # for more details. import os.path import six from unittest import skipIf, skip import time import itertools from numpy import linspace from ecl.util.util import IntVector from ecl import EclDataType, EclUnitTypeEnum from ecl.eclfile import EclKW, EclFile from ecl.grid import EclGrid from ecl.grid import EclGridGenerator as GridGen from ecl.grid.faults import Layer , FaultCollection from ecl.util.test import TestAreaContext from numpy.testing import assert_allclose from tests import EclTest # This dict is used to verify that corners are mapped to the correct # cell with respect to containment. 
CORNER_HOME = { (0, 0, 0) : 0, (0, 0, 1) : 9, (0, 0, 2) : 18, (0, 0, 3) : 18, (0, 1, 0) : 3, (0, 1, 1) : 12, (0, 1, 2) : 21, (0, 1, 3) : 21, (0, 2, 0) : 6, (0, 2, 1) : 15, (0, 2, 2) : 24, (0, 2, 3) : 24, (0, 3, 0) : 6, (0, 3, 1) : 15, (0, 3, 2) : 24, (0, 3, 3) : 24, (1, 0, 0) : 1, (1, 0, 1) : 10, (1, 0, 2) : 19, (1, 0, 3) : 19, (1, 1, 0) : 4, (1, 1, 1) : 13, (1, 1, 2) : 22, (1, 1, 3) : 22, (1, 2, 0) : 7, (1, 2, 1) : 16, (1, 2, 2) : 25, (1, 2, 3) : 25, (1, 3, 0) : 7, (1, 3, 1) : 16, (1, 3, 2) : 25, (1, 3, 3) : 25, (2, 0, 0) : 2, (2, 0, 1) : 11, (2, 0, 2) : 20, (2, 0, 3) : 20, (2, 1, 0) : 5, (2, 1, 1) : 14, (2, 1, 2) : 23, (2, 1, 3) : 23, (2, 2, 0) : 8, (2, 2, 1) : 17, (2, 2, 2) : 26, (2, 2, 3) : 26, (2, 3, 0) : 8, (2, 3, 1) : 17, (2, 3, 2) : 26, (2, 3, 3) : 26, (3, 0, 0) : 2, (3, 0, 1) : 11, (3, 0, 2) : 20, (3, 0, 3) : 20, (3, 1, 0) : 5, (3, 1, 1) : 14, (3, 1, 2) : 23, (3, 1, 3) : 23, (3, 2, 0) : 8, (3, 2, 1) : 17, (3, 2, 2) : 26, (3, 2, 3) : 26, (3, 3, 0) : 8, (3, 3, 1) : 17, (3, 3, 2) : 26, (3, 3, 3) : 26 } def createVolumeTestGridBase(dim, dV, offset=1): return [ GridGen.create_grid(dim, dV, offset=0), GridGen.create_grid(dim, dV, offset=offset), GridGen.create_grid(dim, dV, offset=offset, irregular_offset=True), GridGen.create_grid(dim, dV, offset=offset, concave=True), GridGen.create_grid(dim, dV, offset=offset, irregular=True), GridGen.create_grid(dim, dV, offset=offset, concave=True, irregular=True), GridGen.create_grid(dim, dV, offset=offset, irregular_offset=True, concave=True), GridGen.create_grid(dim, dV, offset=0, faults=True), GridGen.create_grid(dim, dV, offset=offset, faults=True), GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), scale=2), GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), scale=0.5), GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), translation=(50,50,0)), GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), rotate=True), GridGen.create_grid(dim, dV, escape_origo_shift=(100, 100, 0), 
misalign=True), GridGen.create_grid(dim, dV, offset=offset, escape_origo_shift=(100, 100, 0), irregular_offset=True, concave=True, irregular=True, scale=1.5, translation=(5,5,0), rotate=True, misalign=True) ] def createContainmentTestBase(): return [ (3, GridGen.create_grid((6,6,6), (1,1,1), offset=1)), (10, GridGen.create_grid((3,3,3), (1,1,1), offset=1, concave=True)), (4, GridGen.create_grid((10,10,1), (1,1,1), offset=0., misalign=True)), (3, GridGen.create_grid((6,6,6), (1,1,1), offset=0., escape_origo_shift=(100, 100, 0), irregular_offset=True, concave=True, irregular=True, scale=1.5, translation=(5,5,0), misalign=True ) ) ] def getMinMaxValue(grid): corners = [ grid.getCellCorner(i, cell) for i in range(8) for cell in range(grid.getGlobalSize()) ] return [(min(values), max(values)) for values in zip(*corners)] def createWrapperGrid(grid): """ Creates a grid that occupies the same space as the given grid, but that consists of a single cell. """ x, y, z = grid.getNX()-1, grid.getNY()-1, grid.getNZ()-1 corner_pos = [ (0, 0, 0), (x, 0, 0), (0, y, 0), (x, y, 0), (0, 0, z), (x, 0, z), (0, y, z), (x, y, z) ] corners = [ grid.getCellCorner(i, ijk=pos) for i, pos in enumerate(corner_pos) ] return GridGen.create_single_cell_grid(corners) def average(points): p = six.functools.reduce( lambda a, b: (a[0] + b[0], a[1] + b[1], a[2] + b[2]), points) return [elem/float(len(points)) for elem in p] # This test class should only have test cases which do not require # external test data. Tests involving Equinor test data are in the # test_grid_equinor module. 
class GridTest(EclTest): def test_oom_grid(self): nx = 2000 ny = 2000 nz = 2000 with self.assertRaises(MemoryError): grid = GridGen.createRectangular( (nx,ny,nz), (1,1,1)) def test_posXYEdge(self): nx = 10 ny = 11 grid = GridGen.createRectangular( (nx,ny,1) , (1,1,1) ) self.assertEqual( grid.findCellCornerXY(0,0,0) , 0 ) self.assertEqual( grid.findCellCornerXY(nx,0,0) , nx) self.assertEqual( grid.findCellCornerXY(0 , ny , 0) , (nx + 1 ) * ny ) self.assertEqual( grid.findCellCornerXY(nx,ny,0) , (nx + 1 ) * (ny + 1) - 1) self.assertEqual( grid.findCellCornerXY(0.25,0,0) , 0 ) self.assertEqual( grid.findCellCornerXY(0,0.25,0) , 0 ) self.assertEqual( grid.findCellCornerXY(nx - 0.25,0,0) , nx ) self.assertEqual( grid.findCellCornerXY(nx , 0.25,0) , nx ) self.assertEqual( grid.findCellCornerXY(0 , ny - 0.25, 0) , (nx + 1 ) * ny ) self.assertEqual( grid.findCellCornerXY(0.25 , ny , 0) , (nx + 1 ) * ny ) self.assertEqual( grid.findCellCornerXY(nx -0.25 ,ny,0) , (nx + 1 ) * (ny + 1) - 1) self.assertEqual( grid.findCellCornerXY(nx , ny - 0.25,0) , (nx + 1 ) * (ny + 1) - 1) def test_dims(self): grid = GridGen.createRectangular( (10,20,30) , (1,1,1) ) self.assertEqual( grid.getNX() , 10 ) self.assertEqual( grid.getNY() , 20 ) self.assertEqual( grid.getNZ() , 30 ) self.assertEqual( grid.getGlobalSize() , 30*10*20 ) self.assertEqual( grid.getDims() , (10,20,30,6000) ) def test_create(self): with self.assertRaises(ValueError): grid = GridGen.createRectangular( (10,20,30) , (1,1,1) , actnum = [0,1,1,2]) with self.assertRaises(ValueError): grid = GridGen.createRectangular( (10,20,30) , (1,1,1) , actnum = IntVector(initial_size = 10)) grid = GridGen.createRectangular( (10,20,30) , (1,1,1) ) # actnum=None -> all active self.assertEqual( grid.getNumActive( ) , 30*20*10) actnum = IntVector(default_value = 1 , initial_size = 6000) actnum[0] = 0 actnum[1] = 0 grid = GridGen.createRectangular( (10,20,30) , (1,1,1) , actnum = actnum) self.assertEqual( grid.getNumActive( ) , 30*20*10 - 2) 
def test_all_iters(self): fk = self.createTestPath('local/ECLIPSE/faarikaal/faarikaal1.EGRID') grid = EclGrid(fk) cell = grid[3455] self.assertEqual(3455, cell.global_index) cell = grid[(4,1,82)] self.assertEqual(3455, cell.global_index) self.assertEqual(grid.cell(global_index=3455), grid.cell(active_index=2000)) self.assertEqual(grid.cell(global_index=3455), grid.cell(i=4, j=1, k=82)) na = grid.get_num_active() self.assertEqual(na, 4160) cnt = 0 for c in grid.cells(active=True): cnt += 1 self.assertTrue(c.active) self.assertEqual(cnt, 4160) cnt = len([c for c in grid.cells()]) self.assertEqual(cnt, len(grid)) def test_repr_and_name(self): grid = GridGen.createRectangular((2,2,2), (10,10,10), actnum=[0,0,0,0,1,1,1,1]) pfx = 'EclGrid(' rep = repr(grid) self.assertEqual(pfx, rep[:len(pfx)]) self.assertEqual(type(rep), type('')) self.assertEqual(type(grid.getName()), type('')) with TestAreaContext("python/ecl_grid/repr"): grid.save_EGRID("CASE.EGRID") g2 = EclGrid("CASE.EGRID") r2 = repr(g2) self.assertEqual(pfx, r2[:len(pfx)]) self.assertEqual(type(r2), type('')) self.assertEqual(type(g2.getName()), type('')) def test_node_pos(self): grid = GridGen.createRectangular( (10,20,30) , (1,1,1) ) with self.assertRaises(IndexError): grid.getNodePos(-1,0,0) with self.assertRaises(IndexError): grid.getNodePos(11,0,0) p0 = grid.getNodePos(0,0,0) self.assertEqual( p0 , (0,0,0)) p7 = grid.getNodePos(10,20,30) self.assertEqual( p7 , (10,20,30)) # The broken file was previously handled by the ecl_file_open() call internally # in the ecl_grid implementation. That will now not fail for a broken file, and then # the grid class needs to do more/better checking itself. 
@skip("Needs better error checking inside in the ecl_grid") def test_truncated_file(self): grid = GridGen.createRectangular( (10,20,30) , (1,1,1) ) with TestAreaContext("python/ecl_grid/truncated"): grid.save_EGRID( "TEST.EGRID") size = os.path.getsize( "TEST.EGRID") with open("TEST.EGRID" , "r+") as f: f.truncate( size / 2 ) with self.assertRaises(IOError): EclGrid("TEST.EGRID") def test_posXY1(self): nx = 4 ny = 1 nz = 1 grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1) ) (i,j) = grid.findCellXY( 0.5 , 0.5, 0 ) self.assertEqual(i , 0) self.assertEqual(j , 0) (i,j) = grid.findCellXY( 3.5 , 0.5, 0 ) self.assertEqual(i , 3) self.assertEqual(j , 0) def test_init_ACTNUM(self): nx = 10 ny = 23 nz = 7 grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1) ) actnum = grid.exportACTNUM() self.assertEqual( len(actnum) , nx*ny*nz ) self.assertEqual( actnum[0] , 1 ) self.assertEqual( actnum[nx*ny*nz - 1] , 1 ) actnum_kw = grid.exportACTNUMKw( ) self.assertEqual(len(actnum_kw) , len(actnum)) for a1,a2 in zip(actnum, actnum_kw): self.assertEqual(a1, a2) def test_posXY(self): nx = 10 ny = 23 nz = 7 grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1) ) with self.assertRaises(IndexError): grid.findCellXY( 1 , 1, -1 ) with self.assertRaises(IndexError): grid.findCellXY( 1 , 1, nz + 1 ) with self.assertRaises(ValueError): grid.findCellXY(15 , 78 , 2) i,j = grid.findCellXY( 1.5 , 1.5 , 2 ) self.assertEqual(i , 1) self.assertEqual(j , 1) for i in range(nx): for j in range(ny): p = grid.findCellXY(i + 0.5 , j+ 0.5 , 0) self.assertEqual( p[0] , i ) self.assertEqual( p[1] , j ) c = grid.findCellCornerXY( 0.10 , 0.10 , 0 ) self.assertEqual(c , 0) c = grid.findCellCornerXY( 0.90 , 0.90 , 0 ) self.assertEqual( c , (nx + 1) + 1 ) c = grid.findCellCornerXY( 0.10 , 0.90 , 0 ) self.assertEqual( c , (nx + 1) ) <|fim▁hole|> c = grid.findCellCornerXY( 0.90 , 0.10 , 0 ) self.assertEqual( c , 1 ) def test_compressed_copy(self): nx = 10 ny = 10 nz = 10 grid = GridGen.createRectangular( 
(nx,ny,nz) , (1,1,1) ) kw1 = EclKW("KW" , 1001 , EclDataType.ECL_INT ) with self.assertRaises(ValueError): cp = grid.compressedKWCopy( kw1 ) def test_dxdydz(self): nx = 10 ny = 10 nz = 10 grid = GridGen.createRectangular( (nx,ny,nz) , (2,3,4) ) (dx,dy,dz) = grid.getCellDims( active_index = 0 ) self.assertEqual( dx , 2 ) self.assertEqual( dy , 3 ) self.assertEqual( dz , 4 ) def test_create_3d_is_create_kw_inverse(self): nx = 10 ny = 7 nz = 5 grid = GridGen.create_rectangular((nx, ny, nz), (1, 1, 1)) kw1 = EclKW("SWAT", nx * ny * nz, EclDataType.ECL_FLOAT) for k, j, i in itertools.product(range(nz), range(ny), range(nx)): kw1[i + j * nx + nx * ny * k] = i * j * k numpy_3d = grid.create3D(kw1) kw2 = grid.create_kw(numpy_3d, "SWAT", False) self.assertEqual(kw2.name, "SWAT") assert_allclose(grid.create3D(kw2), numpy_3d) def test_create_3d_agrees_with_get_value(self): nx = 5 ny = 3 nz = 2 grid = GridGen.createRectangular((nx, ny, nz), (1, 1, 1)) kw = EclKW("SWAT", nx * ny * nz, EclDataType.ECL_FLOAT) for k, j, i in itertools.product(range(nz), range(ny), range(nx)): kw[i + j * nx + nx * ny * k] = i * j * k numpy_3d = grid.create3D(kw) for k, j, i in itertools.product(range(nz), range(ny), range(nx)): self.assertAlmostEqual(numpy_3d[i, j, k], grid.grid_value(kw, i, j, k)) def test_len(self): nx = 10 ny = 11 nz = 12 actnum = EclKW( "ACTNUM" , nx*ny*nz , EclDataType.ECL_INT ) actnum[0] = 1 actnum[1] = 1 actnum[2] = 1 actnum[3] = 1 grid = GridGen.createRectangular( (nx,ny,nz) , (1,1,1), actnum = actnum) self.assertEqual( len(grid) , nx*ny*nz ) self.assertEqual( grid.getNumActive( ) , 4 ) def test_export(self): dims = (3, 3, 3) coord = GridGen.create_coord(dims, (1,1,1)) zcorn = GridGen.create_zcorn(dims, (1,1,1), offset=0) grid = EclGrid.create(dims, zcorn, coord, None) self.assertEqual(zcorn, grid.export_zcorn()) self.assertEqual(coord, grid.export_coord()) def test_output_units(self): n = 10 a = 1 grid = GridGen.createRectangular( (n,n,n), (a,a,a)) with 
TestAreaContext("python/ecl_grid/units"): grid.save_EGRID( "CASE.EGRID" , output_unit = EclUnitTypeEnum.ECL_FIELD_UNITS ) f = EclFile("CASE.EGRID") g = f["GRIDUNIT"][0] self.assertEqual( g[0].strip( ) , "FEET" ) g2 = EclGrid("CASE.EGRID") self.assertFloatEqual( g2.cell_volume( global_index = 0 ) , 3.28084*3.28084*3.28084) grid.save_EGRID( "CASE.EGRID" ) f = EclFile("CASE.EGRID") g = f["GRIDUNIT"][0] self.assertEqual( g[0].strip( ) , "METRES" ) grid.save_EGRID( "CASE.EGRID" , output_unit = EclUnitTypeEnum.ECL_LAB_UNITS) f = EclFile("CASE.EGRID") g = f["GRIDUNIT"][0] self.assertEqual( g[0].strip() , "CM" ) g2 = EclGrid("CASE.EGRID") self.assertFloatEqual( g2.cell_volume( global_index = 0 ) , 100*100*100 ) def test_volume(self): dim = (10,10,10) dV = (2,2,2) grids = createVolumeTestGridBase(dim, dV) for grid in grids: tot_vol = createWrapperGrid(grid).cell_volume(0) cell_volumes = [grid.cell_volume(i) for i in range(grid.getGlobalSize())] self.assertTrue(min(cell_volumes) >= 0) self.assertFloatEqual(sum(cell_volumes), tot_vol) def test_unique_containment(self): test_base = createContainmentTestBase() for steps_per_unit, grid in test_base: wgrid = createWrapperGrid(grid) (xmin, xmax), (ymin, ymax), (zmin, zmax) = getMinMaxValue(wgrid) x_space = linspace( xmin - 1, xmax + 1, int(xmax - xmin + 2) * steps_per_unit + 1 ) y_space = linspace( ymin - 1, ymax + 1, int(ymax - ymin + 2) * steps_per_unit + 1 ) z_space = linspace( zmin - 1, zmax + 1, int(zmax - zmin + 2) * steps_per_unit + 1 ) # limit amount of points tested by # only testing every 3rd point x_space = x_space[0:-1:3] y_space = y_space[0:-1:3] z_space = z_space[0:-1:3] for x, y, z in itertools.product(x_space, y_space, z_space): hits = [ grid.cell_contains(x, y, z, i) for i in range(grid.getGlobalSize()) ].count(True) self.assertIn(hits, [0, 1]) expected = 1 if wgrid.cell_contains(x, y, z, 0) else 0 self.assertEqual( expected, hits, 'Expected %d for (%g,%g,%g), got %d' % (expected, x, y, z, hits) ) def 
test_cell_corner_containment(self): n = 4 d = 10 grid = GridGen.createRectangular( (n, n, n), (d, d, d)) for x, y, z in itertools.product(range(0, n*d+1, d), repeat=3): self.assertEqual( 1, [grid.cell_contains(x, y, z, i) for i in range(n**3)].count(True) ) def test_cell_corner_containment_compatability(self): grid = GridGen.createRectangular( (3,3,3), (1,1,1) ) for x, y, z in itertools.product(range(4), repeat=3): for i in range(27): if grid.cell_contains(x, y, z, i): self.assertEqual( CORNER_HOME[(x,y,z)], i ) def test_cell_face_containment(self): n = 4 d = 10 grid = GridGen.createRectangular( (n, n, n), (d, d, d)) for x, y, z in itertools.product(range(d//2, n*d, d), repeat=3): for axis, direction in itertools.product(range(3), [-1, 1]): p = [x, y, z] p[axis] = p[axis] + direction*d/2 self.assertEqual( 1, [grid.cell_contains(p[0], p[1], p[2], i) for i in range(n**3)].count(True) ) # This test generates a cell that is concave on ALL 6 sides def test_concave_cell_containment(self): points = [ (5, 5, 5), (20, 10, 10), (10, 20, 10), (25, 25, 5), (10, 10, 20), (25, 5, 25), (5, 25, 25), (20, 20, 20) ] grid = GridGen.create_single_cell_grid(points) assertPoint = lambda p : self.assertTrue( grid.cell_contains(p[0], p[1], p[2], 0) ) assertNotPoint = lambda p : self.assertFalse( grid.cell_contains(p[0], p[1], p[2], 0) ) # Cell center assertPoint(average(points)); # "Side" center assertNotPoint(average(points[0:4:])) assertNotPoint(average(points[4:8:])) assertNotPoint(average(points[1:8:2])) assertNotPoint(average(points[0:8:2])) assertNotPoint(average(points[0:8:4] + points[1:8:4])) assertNotPoint(average(points[2:8:4] + points[3:8:4])) # Corners for p in points: assertPoint(p) # Edges edges = ([(i, i+1) for i in range(0, 8, 2)] + [(i, i+2) for i in [0, 1, 4, 5]] + [(i, i+4) for i in range(4)] + [(1,2), (2,7), (1,7), (4,7), (2,4), (4,1)]) for a,b in edges: assertPoint(average([points[a], points[b]])) # Epsilon inside from corners middle_point = average(points) for p in 
points: assertPoint(average(20*[p] + [middle_point])) # Espilon outside middle_point[2] = 0 for p in points[0:4:]: assertNotPoint(average(20*[p] + [middle_point])) middle_point[2] = 30 for p in points[4:8:]: assertNotPoint(average(20*[p] + [middle_point])) # This test generates a cell that is strictly convex on ALL 6 sides def test_concvex_cell_containment(self): points = [ (10, 10, 10), (25, 5, 5), (5, 25, 5), (20, 20, 10), (5, 5, 25), (20, 10, 20), (10, 20, 20), (25, 25, 25) ] grid = GridGen.create_single_cell_grid(points) assertPoint = lambda p : self.assertTrue( grid.cell_contains(p[0], p[1], p[2], 0) ) assertNotPoint = lambda p : self.assertFalse( grid.cell_contains(p[0], p[1], p[2], 0) ) # Cell center assertPoint(average(points)); # "Side" center assertPoint(average(points[0:4:])) assertPoint(average(points[4:8:])) assertPoint(average(points[1:8:2])) assertPoint(average(points[0:8:2])) assertPoint(average(points[0:8:4] + points[1:8:4])) assertPoint(average(points[2:8:4] + points[3:8:4])) # Corners for p in points: assertPoint(p) # Edges edges = ([(i, i+1) for i in range(0, 8, 2)] + [(i, i+2) for i in [0, 1, 4, 5]] + [(i, i+4) for i in range(4)] + [(1,2), (2,7), (1,7), (4,7), (2,4), (4,1)]) for a,b in edges: assertPoint(average([points[a], points[b]])) # Epsilon inside from corners middle_point = average(points) for p in points: assertPoint(average(20*[p] + [middle_point])) # Espilon outside middle_point[2] = 0 for p in points[0:4:]: assertNotPoint(average(20*[p] + [middle_point])) middle_point[2] = 30 for p in points[4:8:]: assertNotPoint(average(20*[p] + [middle_point]))<|fim▁end|>
c = grid.findCellCornerXY( 0.90 , 0.90 , 0 ) self.assertEqual( c , (nx + 1) + 1 )
<|file_name|>test_offhours.py<|end_file_name|><|fim▁begin|># Copyright 2015-2017 Capital One Services, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import, division, print_function, unicode_literals import datetime import json import os from dateutil import zoneinfo from mock import mock from .common import BaseTest, instance from c7n.filters import FilterValidationError from c7n.filters.offhours import OffHour, OnHour, ScheduleParser, Time # Per http://blog.xelnor.net/python-mocking-datetime/ # naive implementation has issues with pypy real_datetime_class = datetime.datetime def mock_datetime_now(tgt, dt): class DatetimeSubclassMeta(type): @classmethod def __instancecheck__(mcs, obj): return isinstance(obj, real_datetime_class) class BaseMockedDatetime(real_datetime_class): target = tgt @classmethod def now(cls, tz=None): return cls.target.replace(tzinfo=tz) @classmethod def utcnow(cls): return cls.target # Python2 & Python3 compatible metaclass MockedDatetime = DatetimeSubclassMeta( b'datetime' if str is bytes else 'datetime', # hack Python2/3 port (BaseMockedDatetime,), {}) return mock.patch.object(dt, 'datetime', MockedDatetime) class OffHoursFilterTest(BaseTest): """[off|on] hours testing""" def test_offhours_records(self): session_factory = self.replay_flight_data('test_offhours_records') t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=8, day=14, hour=19, minute=00) with mock_datetime_now(t, 
datetime): p = self.load_policy({ 'name': 'offhours-records', 'resource': 'ec2', 'filters': [ {'State.Name': 'running'}, {'type': 'offhour', 'offhour': 19, 'tag': 'custodian_downtime', 'default_tz': 'est', 'weekends': False}] }, session_factory=session_factory) resources = p.run() self.assertEqual(resources, []) with open(os.path.join( p.options['output_dir'], 'offhours-records', 'parse_errors.json')) as fh: data = json.load(fh) self.assertEqual(len(data), 1) self.assertEqual(data[0][0], 'i-0ee3a9bc2eeed269f') self.assertEqual(data[0][1], 'off=[m-f,8];on=[n-f,5];pz=est') with open(os.path.join( p.options['output_dir'], 'offhours-records', 'opted_out.json')) as fh: data = json.load(fh) self.assertEqual(len(data), 1) self.assertEqual(data[0]['InstanceId'], 'i-0a619b58a7e704a9f') def test_validate(self): self.assertRaises( FilterValidationError, OffHour({'default_tz': 'zmta'}).validate) self.assertRaises( FilterValidationError, OffHour({'offhour': 25}).validate) i = OffHour({}) self.assertEqual(i.validate(), i) def test_process(self): f = OffHour({'opt-out': True}) instances = [ instance(Tags=[]), instance( Tags=[{'Key': 'maid_offhours', 'Value': ''}]), instance( Tags=[{'Key': 'maid_offhours', 'Value': 'on'}]), instance( Tags=[{'Key': 'maid_offhours', 'Value': 'off'}]), instance( Tags=[ {'Key': 'maid_offhours', 'Value': "off=(m-f,5);zebrablue,on=(t-w,5)"}])] t = datetime.datetime( year=2015, month=12, day=1, hour=19, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) with mock_datetime_now(t, datetime): self.assertEqual( f.process(instances), [instances[0], instances[1], instances[2]] ) def test_opt_out_behavior(self): # Some users want to match based on policy filters to # a resource subset with default opt out behavior t = datetime.datetime( year=2015, month=12, day=1, hour=19, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) f = OffHour({'opt-out': True}) with mock_datetime_now(t, datetime): i = instance(Tags=[]) self.assertEqual(f(i), True) i = instance( 
Tags=[{'Key': 'maid_offhours', 'Value': ''}] ) self.assertEqual(f(i), True) i = instance( Tags=[{'Key': 'maid_offhours', 'Value': 'on'}] ) self.assertEqual(f(i), True) i = instance( Tags=[{'Key': 'maid_offhours', 'Value': 'off'}]) self.assertEqual(f(i), False) self.assertEqual(f.opted_out, [i]) def test_opt_in_behavior(self): # Given the addition of opt out behavior, verify if its # not configured that we don't touch an instance that # has no downtime tag i = instance(Tags=[]) i2 = instance(Tags=[{'Key': 'maid_offhours', 'Value': ''}]) i3 = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'on'}]) t = datetime.datetime( year=2015, month=12, day=1, hour=19, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) f = OffHour({}) with mock_datetime_now(t, datetime): self.assertEqual(f(i), False) self.assertEqual(f(i2), True) self.assertEqual(f(i3), True) t = datetime.datetime( year=2015, month=12, day=1, hour=7, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) f = OnHour({}) with mock_datetime_now(t, datetime): self.assertEqual(f(i), False) self.assertEqual(f(i2), True) self.assertEqual(f(i3), True) def xtest_time_match_stops_after_skew(self): hour = 7 t = datetime.datetime( year=2015, month=12, day=1, hour=hour, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OnHour({'skew': 1}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(0, 4): dt.target = t.replace(hour=hour + n) results.append(f(i)) self.assertEqual(results, [True, True, False, False]) def test_resource_schedule_error(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2015, month=12, day=1, hour=19, minute=5) f = OffHour({}) f.process_resource_schedule = lambda: False with mock_datetime_now(t, datetime): i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'tz=est'}]) self.assertEqual(f(i), False) def test_time_filter_usage_errors(self): self.assertRaises(NotImplementedError, 
Time, {}) def test_everyday_onhour(self): # weekends on means we match times on the weekend start_day = 14 # sunday t = datetime.datetime( year=2016, day=start_day, month=8, hour=7, minute=20) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OnHour({'weekends': False}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(7): dt.target = t.replace(day=start_day + n) results.append(f(i)) self.assertEqual(results, [True] * 7) def test_everyday_offhour(self): # weekends on means we match times on the weekend start_day = 14 # sunday t = datetime.datetime( year=2016, day=start_day, month=8, hour=19, minute=20) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OffHour({'weekends': False}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(7): dt.target = t.replace(day=start_day + n) results.append(f(i)) self.assertEqual(results, [True] * 7) def test_weekends_only_onhour_support(self): # start day is a sunday, weekend only means we only start # on monday morning. start_day = 14 t = datetime.datetime( year=2016, day=start_day, month=8, hour=7, minute=20) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OnHour({'weekends-only': True}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(7): dt.target = t.replace(day=start_day + n) results.append(f(i)) self.assertEqual(results, [ False, True, False, False, False, False, False]) def test_weekends_only_offhour_support(self): # start day is a sunday, weekend only means we only stop # on friday evening. 
start_day = 14 t = datetime.datetime( year=2016, day=start_day, month=8, hour=7, minute=20) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OnHour({'weekends-only': True}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(7): dt.target = t.replace(day=start_day + n) results.append(f(i)) self.assertEqual(results, [ False, True, False, False, False, False, False]) def test_onhour_weekend_support(self): start_day = 14 t = datetime.datetime( year=2016, day=start_day, month=2, hour=19, minute=20) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OffHour({'weekends-only': True}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(7): dt.target = t.replace(day=start_day + n) results.append(f(i)) self.assertEqual( results, [False, False, False, False, False, True, False]) def test_offhour_weekend_support(self): start_day = 26 t = datetime.datetime( year=2016, day=start_day, month=2, hour=19, minute=20) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OffHour({}) results = [] with mock_datetime_now(t, datetime) as dt: for n in range(0, 4): dt.target = t.replace(day=start_day + n) results.append(f(i)) self.assertEqual(results, [True, False, False, True]) def test_current_time_test(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2015, month=12, day=1, hour=19, minute=5) with mock_datetime_now(t, datetime): i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'tz=est'}]) f = OffHour({}) p = f.get_tag_value(i) self.assertEqual(p, 'tz=est') tz = f.get_tz('est') self.assertTrue(str(tz) in ( "tzfile('US/Eastern')",<|fim▁hole|> "tzfile('America/New_York')")) self.assertEqual( datetime.datetime.now(tz), t) self.assertEqual(t.hour, 19) def test_offhours_real_world_values(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2015, month=12, day=1, hour=19, minute=5) with mock_datetime_now(t, datetime): results = 
[OffHour({})(i) for i in [ instance(Tags=[ {'Key': 'maid_offhours', 'Value': ''}]), instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'on'}]), instance(Tags=[ {'Key': 'maid_offhours', 'Value': '"Offhours tz=ET"'}]), instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'Offhours tz=PT'}])]] # unclear what this is really checking self.assertEqual(results, [True, True, True, True]) def test_offhours_get_value(self): off = OffHour({'default_tz': 'ct'}) i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'Offhours tz=PT'}]) self.assertEqual(off.get_tag_value(i), "offhours tz=pt") self.assertFalse(off.parser.has_resource_schedule( off.get_tag_value(i), 'off')) self.assertTrue(off.parser.keys_are_valid( off.get_tag_value(i))) self.assertEqual(off.parser.raw_data( off.get_tag_value(i)), {'tz': 'pt'}) def test_offhours(self): t = datetime.datetime(year=2015, month=12, day=1, hour=19, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) with mock_datetime_now(t, datetime): i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'tz=est'}]) self.assertEqual(OffHour({})(i), True) def test_onhour(self): t = datetime.datetime(year=2015, month=12, day=1, hour=7, minute=5, tzinfo=zoneinfo.gettz('America/New_York')) with mock_datetime_now(t, datetime): i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'tz=est'}]) self.assertEqual(OnHour({})(i), True) self.assertEqual(OnHour({'onhour': 8})(i), False) def test_cant_parse_tz(self): i = instance(Tags=[ {'Key': 'maid_offhours', 'Value': 'tz=evt'}]) self.assertEqual(OffHour({})(i), False) def test_custom_offhours(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=5, day=26, hour=19, minute=00) results = [] with mock_datetime_now(t, datetime): for i in [instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,19);on=(m-f,7);tz=et'}]), instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,20);on=(m-f,7);tz=et'}])]: results.append(OffHour({})(i)) self.assertEqual(results, [True, 
False]) def test_custom_onhours(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=5, day=26, hour=7, minute=00) results = [] with mock_datetime_now(t, datetime): for i in [instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,19);on=(m-f,7);tz=et'}]), instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,20);on=(m-f,9);tz=et'}])]: results.append(OnHour({})(i)) self.assertEqual(results, [True, False]) def test_arizona_tz(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=5, day=26, hour=7, minute=00) with mock_datetime_now(t, datetime): i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,19);on=(m-f,7);tz=at'}]) self.assertEqual(OnHour({})(i), True) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,20);on=(m-f,6);tz=ast'}]) self.assertEqual(OnHour({})(i), False) def test_custom_bad_tz(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=5, day=26, hour=7, minute=00) with mock_datetime_now(t, datetime): i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,19);on=(m-f,7);tz=et'}]) self.assertEqual(OnHour({})(i), True) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,20);on=(m-f,7);tz=abc'}]) self.assertEqual(OnHour({})(i), False) def test_custom_bad_hours(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=5, day=26, hour=19, minute=00) # default error handling is to exclude the resource with mock_datetime_now(t, datetime): # This isn't considered a bad value, its basically omitted. 
i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=();tz=et'}]) self.assertEqual(OffHour({})(i), False) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,90);on=(m-f,7);tz=et'}]) # malformed value self.assertEqual(OffHour({})(i), False) t = t.replace(year=2016, month=5, day=26, hour=13, minute=00) with mock_datetime_now(t, datetime): i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=();tz=et'}]) # will go to default values, but not work due to default time self.assertEqual(OffHour({})(i), False) i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'off=(m-f,90);on=(m-f,7);tz=et'}]) self.assertEqual(OffHour({})(i), False) def test_tz_only(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=5, day=26, hour=7, minute=00) with mock_datetime_now(t, datetime): i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'tz=est'}]) self.assertEqual(OnHour({})(i), True) def test_empty_tag(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=5, day=26, hour=7, minute=00) with mock_datetime_now(t, datetime): i = instance(Tags=[{'Key': 'maid_offhours', 'Value': ''}]) self.assertEqual(OnHour({})(i), True) def test_on_tag(self): t = datetime.datetime.now(zoneinfo.gettz('America/New_York')) t = t.replace(year=2016, month=5, day=26, hour=7, minute=00) with mock_datetime_now(t, datetime): i = instance(Tags=[{'Key': 'maid_offhours', 'Value': 'on'}]) self.assertEqual(OnHour({})(i), True) class ScheduleParserTest(BaseTest): # table style test # list of (tag value, parse result) table = [ ################ # Standard cases ('off=(m-f,10);on=(m-f,7);tz=et', {'off': [{'days': [0, 1, 2, 3, 4], 'hour': 10}], 'on': [{'days': [0, 1, 2, 3, 4], 'hour': 7}], 'tz': 'et'}), ("off=[(m-f,9)];on=(m-s,10);tz=pt", {'off': [{'days': [0, 1, 2, 3, 4], 'hour': 9}], 'on': [{'days': [0, 1, 2, 3, 4, 5], 'hour': 10}], 'tz': 'pt'}), ("off=[(m-f,23)];on=(m-s,10);tz=pt", {'off': [{'days': [0, 1, 2, 
3, 4], 'hour': 23}], 'on': [{'days': [0, 1, 2, 3, 4, 5], 'hour': 10}], 'tz': 'pt'}), ('off=(m-f,19);on=(m-f,7);tz=pst', {'off': [{'days': [0, 1, 2, 3, 4], 'hour': 19}], 'on': [{'days': [0, 1, 2, 3, 4], 'hour': 7}], 'tz': 'pst'}), # wrap around days (saturday, sunday, monday) ('on=[(s-m,10)];off=(s-m,19)', {'on': [{'days': [5, 6, 0], 'hour': 10}], 'off': [{'days': [5, 6, 0], 'hour': 19}], 'tz': 'et'}), # multiple single days specified ('on=[(m,9),(t,10),(w,7)];off=(m-u,19)', {'on': [{'days': [0], 'hour': 9}, {'days': [1], 'hour': 10}, {'days': [2], 'hour': 7}], 'off': [{'days': [0, 1, 2, 3, 4, 5, 6], 'hour': 19}], 'tz': 'et'}), # using brackets also works, if only single time set ('off=[m-f,20];on=[m-f,5];tz=est', {'on': [{'days': [0, 1, 2, 3, 4], 'hour': 5}], 'off': [{'days': [0, 1, 2, 3, 4], 'hour': 20}], 'tz': 'est'}), # same string, exercise cache lookup. ('off=[m-f,20];on=[m-f,5];tz=est', {'on': [{'days': [0, 1, 2, 3, 4], 'hour': 5}], 'off': [{'days': [0, 1, 2, 3, 4], 'hour': 20}], 'tz': 'est'}), ################ # Invalid Cases ('', None), # invalid day ('off=(1-2,12);on=(m-f,10);tz=est', None), # invalid hour ('off=(m-f,a);on=(m-f,10);tz=est', None), ('off=(m-f,99);on=(m-f,7);tz=pst', None), # invalid day ('off=(x-f,10);on=(m-f,10);tz=est', None), # no hour specified for on ('off=(m-f);on=(m-f,10);tz=est', None), # invalid day spec ('off=(m-t-f,12);on=(m-f,10);tz=est', None), # random extra ('off=(m-f,5);zebra=blue,on=(t-w,5)', None), ('off=(m-f,5);zebra=blue;on=(t-w,5)', None), # random extra again ('off=(m-f,5);zebrablue,on=(t-w,5)', None), ('bar;off=(m-f,5);zebrablue,on=(t-w,5)', None), ] def test_schedule_parser(self): self.maxDiff = None parser = ScheduleParser({'tz': 'et'}) for value, expected in self.table: self.assertEqual(parser.parse(value), expected)<|fim▁end|>
<|file_name|>radio.js<|end_file_name|><|fim▁begin|>/* eslint-env mocha */ const mockBot = require('../mockBot') const assert = require('assert') const mockery = require('mockery') const sinon = require('sinon') const json = JSON.stringify({ state: 'normal', nowTitle: 'Midnight News', nowInfo: '20/03/2019', nextStart: '2019-03-20T00:30:00Z', nextTitle: 'Book of the Week' }) <|fim▁hole|> let sandbox before(function () { // mockery mocks the entire require() mockery.enable() mockery.registerMock('request-promise', { get: () => Promise.resolve(json) }) // sinon stubs individual functions sandbox = sinon.sandbox.create() mockBot.loadModule('radio') }) it('should parse the API correctly', async function () { sandbox.useFakeTimers({ now: 1553040900000 }) // 2019-03-20T00:15:00Z const promise = await mockBot.runCommand('!r4') assert.strictEqual(promise, 'Now: \u000300Midnight News\u000f \u000315(20/03/2019)\u000f followed by Book of the Week in 15 minutes (12:30 am)') }) after(function (done) { mockery.disable() mockery.deregisterAll() done() }) })<|fim▁end|>
describe('radio module', function () {
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># To change this license header, choose License Headers in Project Properties.<|fim▁hole|><|fim▁end|>
# To change this template file, choose Tools | Templates # and open the template in the editor.
<|file_name|>test-registry-datasets.py<|end_file_name|><|fim▁begin|>"""Tests for registry module - datasets method""" import vcr from pygbif import registry @vcr.use_cassette("test/vcr_cassettes/test_datasets.yaml") def test_datasets(): "registry.datasets - basic test" res = registry.datasets() assert dict == res.__class__ @vcr.use_cassette("test/vcr_cassettes/test_datasets_limit.yaml") def test_datasets_limit(): "registry.datasets - limit param"<|fim▁hole|> res = registry.datasets(limit=1) assert dict == res.__class__ assert 1 == len(res["results"]) res = registry.datasets(limit=3) assert dict == res.__class__ assert 3 == len(res["results"]) @vcr.use_cassette("test/vcr_cassettes/test_datasets_type.yaml") def test_datasets_type(): "registry.datasets - type param" res = registry.datasets(type="OCCURRENCE") vv = [x["type"] for x in res["results"]] assert dict == res.__class__ assert 100 == len(res["results"]) assert "OCCURRENCE" == list(set(vv))[0]<|fim▁end|>
<|file_name|>lint.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Runs Android's lint tool.""" import argparse import os import re import sys import traceback from xml.dom import minidom from util import build_utils _SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) def _OnStaleMd5(changes, lint_path, config_path, processed_config_path, manifest_path, result_path, product_dir, sources, jar_path, cache_dir, android_sdk_version, resource_dir=None, classpath=None, can_fail_build=False, silent=False): def _RelativizePath(path): """Returns relative path to top-level src dir. Args: path: A path relative to cwd. """ return os.path.relpath(os.path.abspath(path), _SRC_ROOT) def _ProcessConfigFile(): if not config_path or not processed_config_path: return if not build_utils.IsTimeStale(processed_config_path, [config_path]): return with open(config_path, 'rb') as f: content = f.read().replace( 'PRODUCT_DIR', _RelativizePath(product_dir)) with open(processed_config_path, 'wb') as f: f.write(content) def _ProcessResultFile(): with open(result_path, 'rb') as f: content = f.read().replace( _RelativizePath(product_dir), 'PRODUCT_DIR') with open(result_path, 'wb') as f: f.write(content) def _ParseAndShowResultFile(): dom = minidom.parse(result_path) issues = dom.getElementsByTagName('issue') if not silent: print >> sys.stderr for issue in issues: issue_id = issue.attributes['id'].value message = issue.attributes['message'].value location_elem = issue.getElementsByTagName('location')[0] path = location_elem.attributes['file'].value line = location_elem.getAttribute('line') if line: error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id) else: # Issues in class files don't have a line number. 
error = '%s %s: %s [warning]' % (path, message, issue_id) print >> sys.stderr, error.encode('utf-8') for attr in ['errorLine1', 'errorLine2']: error_line = issue.getAttribute(attr) if error_line: print >> sys.stderr, error_line.encode('utf-8') return len(issues) # Need to include all sources when a resource_dir is set so that resources are # not marked as unused. # TODO(agrieve): Figure out how IDEs do incremental linting. if not resource_dir and changes.AddedOrModifiedOnly(): changed_paths = set(changes.IterChangedPaths()) sources = [s for s in sources if s in changed_paths] with build_utils.TempDir() as temp_dir: _ProcessConfigFile()<|fim▁hole|> _RelativizePath(lint_path), '-Werror', '--exitcode', '--showall', '--xml', _RelativizePath(result_path), ] if jar_path: # --classpath is just for .class files for this one target. cmd.extend(['--classpath', _RelativizePath(jar_path)]) if processed_config_path: cmd.extend(['--config', _RelativizePath(processed_config_path)]) if resource_dir: cmd.extend(['--resources', _RelativizePath(resource_dir)]) if classpath: # --libraries is the classpath (excluding active target). cp = ':'.join(_RelativizePath(p) for p in classpath) cmd.extend(['--libraries', cp]) # There may be multiple source files with the same basename (but in # different directories). It is difficult to determine what part of the path # corresponds to the java package, and so instead just link the source files # into temporary directories (creating a new one whenever there is a name # conflict). 
src_dirs = [] def NewSourceDir(): new_dir = os.path.join(temp_dir, str(len(src_dirs))) os.mkdir(new_dir) src_dirs.append(new_dir) return new_dir def PathInDir(d, src): return os.path.join(d, os.path.basename(src)) for src in sources: src_dir = None for d in src_dirs: if not os.path.exists(PathInDir(d, src)): src_dir = d break if not src_dir: src_dir = NewSourceDir() cmd.extend(['--sources', _RelativizePath(src_dir)]) os.symlink(os.path.abspath(src), PathInDir(src_dir, src)) project_dir = NewSourceDir() if android_sdk_version: # Create dummy project.properies file in a temporary "project" directory. # It is the only way to add Android SDK to the Lint's classpath. Proper # classpath is necessary for most source-level checks. with open(os.path.join(project_dir, 'project.properties'), 'w') \ as propfile: print >> propfile, 'target=android-{}'.format(android_sdk_version) # Put the manifest in a temporary directory in order to avoid lint detecting # sibling res/ and src/ directories (which should be pass explicitly if they # are to be included). if manifest_path: os.symlink(os.path.abspath(manifest_path), PathInDir(project_dir, manifest_path)) cmd.append(project_dir) if os.path.exists(result_path): os.remove(result_path) env = {} stderr_filter = None if cache_dir: env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RelativizePath(cache_dir) # When _JAVA_OPTIONS is set, java prints to stderr: # Picked up _JAVA_OPTIONS: ... 
# # We drop all lines that contain _JAVA_OPTIONS from the output stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l) try: build_utils.CheckOutput(cmd, cwd=_SRC_ROOT, env=env or None, stderr_filter=stderr_filter) except build_utils.CalledProcessError: # There is a problem with lint usage if not os.path.exists(result_path): raise # Sometimes produces empty (almost) files: if os.path.getsize(result_path) < 10: if can_fail_build: raise elif not silent: traceback.print_exc() return # There are actual lint issues try: num_issues = _ParseAndShowResultFile() except Exception: # pylint: disable=broad-except if not silent: print 'Lint created unparseable xml file...' print 'File contents:' with open(result_path) as f: print f.read() if not can_fail_build: return if can_fail_build and not silent: traceback.print_exc() # There are actual lint issues try: num_issues = _ParseAndShowResultFile() except Exception: # pylint: disable=broad-except if not silent: print 'Lint created unparseable xml file...' print 'File contents:' with open(result_path) as f: print f.read() raise _ProcessResultFile() msg = ('\nLint found %d new issues.\n' ' - For full explanation refer to %s\n' % (num_issues, _RelativizePath(result_path))) if config_path: msg += (' - Wanna suppress these issues?\n' ' 1. Read comment in %s\n' ' 2. 
Run "python %s %s"\n' % (_RelativizePath(config_path), _RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android', 'lint', 'suppress.py')), _RelativizePath(result_path))) if not silent: print >> sys.stderr, msg if can_fail_build: raise Exception('Lint failed.') def main(): parser = argparse.ArgumentParser() build_utils.AddDepfileOption(parser) parser.add_argument('--lint-path', required=True, help='Path to lint executable.') parser.add_argument('--product-dir', required=True, help='Path to product dir.') parser.add_argument('--result-path', required=True, help='Path to XML lint result file.') parser.add_argument('--cache-dir', required=True, help='Path to the directory in which the android cache ' 'directory tree should be stored.') parser.add_argument('--platform-xml-path', required=True, help='Path to api-platforms.xml') parser.add_argument('--android-sdk-version', help='Version (API level) of the Android SDK used for ' 'building.') parser.add_argument('--create-cache', action='store_true', help='Mark the lint cache file as an output rather than ' 'an input.') parser.add_argument('--can-fail-build', action='store_true', help='If set, script will exit with nonzero exit status' ' if lint errors are present') parser.add_argument('--config-path', help='Path to lint suppressions file.') parser.add_argument('--enable', action='store_true', help='Run lint instead of just touching stamp.') parser.add_argument('--jar-path', help='Jar file containing class files.') parser.add_argument('--java-files', help='Paths to java files.') parser.add_argument('--manifest-path', help='Path to AndroidManifest.xml') parser.add_argument('--classpath', default=[], action='append', help='GYP-list of classpath .jar files') parser.add_argument('--processed-config-path', help='Path to processed lint suppressions file.') parser.add_argument('--resource-dir', help='Path to resource dir.') parser.add_argument('--silent', action='store_true', help='If set, script will not log anything.') 
parser.add_argument('--src-dirs', help='Directories containing java files.') parser.add_argument('--stamp', help='Path to touch on success.') args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:])) if args.enable: sources = [] if args.src_dirs: src_dirs = build_utils.ParseGypList(args.src_dirs) sources = build_utils.FindInDirectories(src_dirs, '*.java') elif args.java_files: sources = build_utils.ParseGypList(args.java_files) if args.config_path and not args.processed_config_path: parser.error('--config-path specified without --processed-config-path') elif args.processed_config_path and not args.config_path: parser.error('--processed-config-path specified without --config-path') input_paths = [ args.lint_path, args.platform_xml_path, ] if args.config_path: input_paths.append(args.config_path) if args.jar_path: input_paths.append(args.jar_path) if args.manifest_path: input_paths.append(args.manifest_path) if args.resource_dir: input_paths.extend(build_utils.FindInDirectory(args.resource_dir, '*')) if sources: input_paths.extend(sources) classpath = [] for gyp_list in args.classpath: classpath.extend(build_utils.ParseGypList(gyp_list)) input_paths.extend(classpath) input_strings = [] if args.android_sdk_version: input_strings.append(args.android_sdk_version) if args.processed_config_path: input_strings.append(args.processed_config_path) output_paths = [ args.result_path ] build_utils.CallAndWriteDepfileIfStale( lambda changes: _OnStaleMd5(changes, args.lint_path, args.config_path, args.processed_config_path, args.manifest_path, args.result_path, args.product_dir, sources, args.jar_path, args.cache_dir, args.android_sdk_version, resource_dir=args.resource_dir, classpath=classpath, can_fail_build=args.can_fail_build, silent=args.silent), args, input_paths=input_paths, input_strings=input_strings, output_paths=output_paths, pass_changes=True, depfile_deps=classpath) if __name__ == '__main__': sys.exit(main())<|fim▁end|>
cmd = [
<|file_name|>reference.py<|end_file_name|><|fim▁begin|>import sys import pprint class Reference(object): def __init__(self, tb_index, varname, target): self.tb_index = tb_index self.varname = varname self.target = target def marker(self, xtb, tb_index, key): return Marker(self, xtb, tb_index, key) class Marker(object): def __init__(self, reference, xtb, tb_index, key): self.reference = reference self.xtb = xtb self.tb_index = tb_index self.key = key self.tb_offset = self.reference.tb_index - self.tb_index def __repr__(self): frame = sys._getframe(1) while frame: try: code = self.xtb._format_variable.func_code except AttributeError: # python 3<|fim▁hole|> frame = frame.f_back else: # pragma: no cover - defensive raise RuntimeError("Expecting to be called with " "XTraceback._format_variable in stack") pretty_repr = pprint.pformat(self.reference.target) if indent + len(self.key) + len(pretty_repr) > self.xtb.print_width \ or pretty_repr.find("\n") > 0: name = "" if self.reference.varname == self.key \ else " name=%s" % self.reference.varname pretty_repr = "<ref offset=%d%s>" % (self.tb_offset, name) return pretty_repr<|fim▁end|>
code = self.xtb._format_variable.__code__ if frame.f_code == code: indent = frame.f_locals["indent"] + 4 break
<|file_name|>wiggle.rs<|end_file_name|><|fim▁begin|>//! Dataflow node that generates/propagates/mutates a wiggle. use util::{modulo_one, almost_eq, angle_almost_eq}; use network::{Network, NodeIndex, GenerationId, NodeId, OutputId, Inputs, Outputs}; use console_server::reactor::Messages; use wiggles_value::{Data, Unipolar, Datatype}; use wiggles_value::knob::{Knobs, Response as KnobResponse}; use std::collections::HashMap; use std::time::Duration; use std::fmt; use std::any::Any; use serde::{Serialize, Serializer}; use serde::de::DeserializeOwned; use serde_json::{Error as SerdeJsonError, self}; use super::serde::SerializableWiggle; use clocks::clock::{ClockId, ClockProvider}; pub type KnobAddr = u32; // We need to qualify the knob's address with the wiggle's address to go up into the network. pub type WiggleKnobAddr = (WiggleId, KnobAddr); pub trait WiggleProvider { fn get_value( &self, wiggle_id: WiggleId, output_id: OutputId, phase_offset: f64, type_hint: Option<Datatype>, clocks: &ClockProvider) -> Data; } pub trait Wiggle { /// A string name for this kind of wiggle. /// This string will be used during serialization and deserialization to uniquely identify /// how to reconstruct this wiggle from a serialized form. fn kind(&self) -> &'static str; /// Return the name that has been assigned to this wiggle. fn name(&self) -> &str; /// Rename this wiggle. fn set_name(&mut self, name: String); /// Update the state of this wiggle using the provided update interval. /// Return a message collection of some kind. fn update(&mut self, dt: Duration) -> Messages<KnobResponse<KnobAddr>>; /// Render the state of this wiggle, providing its currently-assigned inputs as well as a /// function that can be used to retrieve the current value of one of those inputs. /// Specify which output port this wiggle should be rendered for. /// Also provide access to the clock network if this node needs it. 
fn render( &self, phase_offset: f64, type_hint: Option<Datatype>, inputs: &[Option<(WiggleId, OutputId)>], output: OutputId, network: &WiggleProvider, clocks: &ClockProvider) -> Data; /// Return Ok if this wiggle uses a clock input, and return the current value of it. /// If it doesn't use a clock, return Err. fn clock_source(&self) -> Result<Option<ClockId>, ()>; /// Set the clock source for this wiggle. /// If this wiggle doesn't use a clock, return Err. fn set_clock(&mut self, source: Option<ClockId>) -> Result<(), ()>; /// Serialize yourself into JSON. /// Every wiggle must implement this separately until an erased_serde solution is back in /// action. fn as_json(&self) -> Result<String, SerdeJsonError>; fn serializable(&self) -> Result<SerializableWiggle, SerdeJsonError> { Ok(SerializableWiggle { kind: self.kind().to_string(), serialized: self.as_json()?, }) } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub struct WiggleId(NodeIndex, GenerationId); impl fmt::Display for WiggleId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "wiggle {}, generation {}", self.0, self.1) } } impl NodeId for WiggleId { fn new(idx: NodeIndex, gen_id: GenerationId) -> Self { WiggleId(idx, gen_id) } fn index(&self) -> NodeIndex { self.0 } fn gen_id(&self) -> GenerationId { self.1 } } /// Type alias for a network of wiggles. 
pub type WiggleNetwork = Network<Box<CompleteWiggle>, WiggleId, KnobResponse<WiggleKnobAddr>>; impl WiggleProvider for WiggleNetwork { fn get_value( &self, wiggle_id: WiggleId, output_id: OutputId, phase_offset: f64, type_hint: Option<Datatype>, clocks: &ClockProvider) -> Data { // if we don't have this node, return a default match self.node(wiggle_id) { Err(e) => { error!("Error while trying to get wiggle from {}: {}.", wiggle_id, e); Data::default_with_type_hint(type_hint) } Ok(node) => { node.inner().render(phase_offset, type_hint, node.inputs(), output_id, self, clocks) } } } } pub trait CompleteWiggle: Wiggle + Inputs<KnobResponse<WiggleKnobAddr>, WiggleId> + Outputs<KnobResponse<WiggleKnobAddr>, WiggleId> + Knobs<KnobAddr> + fmt::Debug { fn eq(&self, other: &CompleteWiggle) -> bool; fn as_any(&self) -> &Any; } impl<T> CompleteWiggle for T where T: 'static + Wiggle + Inputs<KnobResponse<WiggleKnobAddr>, WiggleId> + Outputs<KnobResponse<WiggleKnobAddr>, WiggleId> + Knobs<KnobAddr> + fmt::Debug + PartialEq { fn eq(&self, other: &CompleteWiggle) -> bool { other.as_any().downcast_ref::<T>().map_or(false, |x| x == self) } fn as_any(&self) -> &Any { self } } impl<'a, 'b> PartialEq<CompleteWiggle+'b> for CompleteWiggle + 'a { fn eq(&self, other: &(CompleteWiggle+'b)) -> bool { CompleteWiggle::eq(self, other) } } impl Outputs<KnobResponse<WiggleKnobAddr>, WiggleId> for Box<CompleteWiggle> { fn default_output_count(&self) -> u32 { (**self).default_output_count() } fn try_push_output( &mut self, node_id: WiggleId) -> Result<Messages<KnobResponse<WiggleKnobAddr>>, ()> { (**self).try_push_output(node_id) } fn try_pop_output( &mut self, node_id: WiggleId) -> Result<Messages<KnobResponse<WiggleKnobAddr>>, ()> { (**self).try_pop_output(node_id) } } // TODO: consider generalizing Update and/or Render as traits. /// Wrapper trait for a wiggle network. 
pub trait WiggleCollection { fn update(&mut self, dt: Duration) -> Messages<KnobResponse<WiggleKnobAddr>>; } impl WiggleCollection for WiggleNetwork { fn update(&mut self, dt: Duration) -> Messages<KnobResponse<WiggleKnobAddr>> { let mut update_messages = Messages::none(); { let update = |node_id: WiggleId, wiggle: &mut Box<CompleteWiggle>| { // lift the address of this message up into the network address space let address_lifter = |knob_num| (node_id, knob_num); let mut messages = wiggle.update(dt); for message in messages.drain() { let lifted_message = message.lift_address(&address_lifter); (&mut update_messages).push(lifted_message); } }; self.map_inner(update); } update_messages }<|fim▁hole|><|fim▁end|>
}
<|file_name|>org_delete.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import sys import argparse import regrws import regrws.method.org try: from apikey import APIKEY except ImportError: APIKEY = None epilog = 'API key can be omitted if APIKEY is defined in apikey.py' parser = argparse.ArgumentParser(epilog=epilog) parser.add_argument('-k', '--key', help='ARIN API key',<|fim▁hole|>parser.add_argument('-s', '--source-address', help='Source IP address') parser.add_argument('org_handle', metavar='ORG_HANDLE') args = parser.parse_args() if args.api_key: APIKEY = args.api_key session = regrws.restful.Session(APIKEY, args.source_address) method = regrws.method.org.Delete(session, args.org_handle) try: payload_out = method.call() except regrws.restful.RegRwsError as exception: print exception.args<|fim▁end|>
required=False if APIKEY else True, dest='api_key')
<|file_name|>DBtest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7 import os import sys this_dir = os.path.dirname(os.path.abspath(__file__))<|fim▁hole|>trunk_dir = os.path.split(this_dir)[0] sys.path.insert(0,trunk_dir) from ikol.dbregister import DataBase from ikol import var if os.path.exists(var.DB_PATH): os.remove(var.DB_PATH) DB = DataBase(var.DB_PATH) DB.insertPlaylist("loLWOCl7nlk","test") DB.insertPlaylist("loLWO357nlk","testb") DB.insertVideo("KDk2341oEQQ","loLWOCl7nlk","test") DB.insertVideo("KDktIWeoE23","loLWOCl7nlk","testb") print DB.getAllVideosByPlaylist("loLWOCl7nlk") print DB.getVideoById("KDk2341oEQQ")<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># ****************************************************************************** # Copyright 2014-2018 Intel Corporation<|fim▁hole|># You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ******************************************************************************<|fim▁end|>
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
<|file_name|>test_images.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import print_function from __future__ import unicode_literals from __future__ import division from PIL import Image from treemap.images import save_uploaded_image from treemap.tests import LocalMediaTestCase, media_dir class SaveImageTest(LocalMediaTestCase): @media_dir def test_rotates_image(self): sideways_file = self.load_resource('tree_sideways.jpg')<|fim▁hole|> img_file, _ = save_uploaded_image(sideways_file, 'test') expected_width, expected_height = Image.open(sideways_file).size actual_width, actual_height = Image.open(img_file).size self.assertEquals(expected_width, actual_height) self.assertEquals(expected_height, actual_width)<|fim▁end|>
<|file_name|>actions.js<|end_file_name|><|fim▁begin|>import { createAction, createThunkAction } from 'redux/actions'; import { getShortenUrl } from 'services/bitly'; import { saveAreaOfInterest } from 'components/forms/area-of-interest/actions'; export const setShareData = createAction('setShareData'); export const setShareUrl = createAction('setShareUrl');<|fim▁hole|> export const setShareModal = createThunkAction( 'setShareModal', (params) => (dispatch) => { const { shareUrl } = params; dispatch( setShareData({ ...params, }) ); getShortenUrl(shareUrl) .then((response) => { let shortShareUrl = ''; if (response.status < 400) { shortShareUrl = response.data.link; dispatch(setShareUrl(shortShareUrl)); } else { dispatch(setShareLoading(false)); } }) .catch(() => { dispatch(setShareLoading(false)); }); } ); export const setShareAoi = createThunkAction('shareModalSaveAoi', (params) => (dispatch) => { dispatch(saveAreaOfInterest({ ...params })) });<|fim▁end|>
export const setShareSelected = createAction('setShareSelected'); export const setShareOpen = createAction('setShareOpen'); export const setShareCopied = createAction('setShareCopied'); export const setShareLoading = createAction('setShareLoading');
<|file_name|>connect_ack_flags.rs<|end_file_name|><|fim▁begin|>use std::io::{Read, Write}; use std::convert::From; use byteorder::{ReadBytesExt, WriteBytesExt}; use control::variable_header::VariableHeaderError; use {Encodable, Decodable}; #[derive(Debug, Eq, PartialEq, Copy, Clone)] pub struct ConnackFlags { pub session_present: bool, } impl ConnackFlags { pub fn empty() -> ConnackFlags { ConnackFlags { session_present: false, } } } impl<'a> Encodable<'a> for ConnackFlags { type Err = VariableHeaderError; fn encode<W: Write>(&self, writer: &mut W) -> Result<(), VariableHeaderError> {<|fim▁hole|> fn encoded_length(&self) -> u32 { 1 } } impl<'a> Decodable<'a> for ConnackFlags { type Err = VariableHeaderError; type Cond = (); fn decode_with<R: Read>(reader: &mut R, _rest: Option<()>) -> Result<ConnackFlags, VariableHeaderError> { let code = try!(reader.read_u8()); if code & !1 != 0 { return Err(VariableHeaderError::InvalidReservedFlag); } Ok(ConnackFlags { session_present: code == 1, }) } }<|fim▁end|>
let code = self.session_present as u8; writer.write_u8(code).map_err(From::from) }
<|file_name|>any.rs<|end_file_name|><|fim▁begin|>use AsLua; use AsMutLua; use Push;<|fim▁hole|>use LuaRead; /// Represents any value that can be stored by Lua #[derive(Clone, Debug, PartialEq)] pub enum AnyLuaValue { LuaString(String), LuaNumber(f64), LuaBoolean(bool), LuaArray(Vec<(AnyLuaValue, AnyLuaValue)>), /// The "Other" element is (hopefully) temporary and will be replaced by "Function" and "Userdata". /// A panic! will trigger if you try to push a Other. LuaOther } impl<L> Push<L> for AnyLuaValue where L: AsMutLua { fn push_to_lua(self, lua: L) -> PushGuard<L> { match self { AnyLuaValue::LuaString(val) => val.push_to_lua(lua), AnyLuaValue::LuaNumber(val) => val.push_to_lua(lua), AnyLuaValue::LuaBoolean(val) => val.push_to_lua(lua), AnyLuaValue::LuaArray(_val) => unimplemented!(),//val.push_to_lua(lua), // FIXME: reached recursion limit during monomorphization AnyLuaValue::LuaOther => panic!("can't push a AnyLuaValue of type Other") } } } impl<L> LuaRead<L> for AnyLuaValue where L: AsLua { fn lua_read_at_position(lua: L, index: i32) -> Result<AnyLuaValue, L> { let lua = match LuaRead::lua_read_at_position(&lua, index) { Ok(v) => return Ok(AnyLuaValue::LuaNumber(v)), Err(lua) => lua }; let lua = match LuaRead::lua_read_at_position(&lua, index) { Ok(v) => return Ok(AnyLuaValue::LuaBoolean(v)), Err(lua) => lua }; let _lua = match LuaRead::lua_read_at_position(&lua, index) { Ok(v) => return Ok(AnyLuaValue::LuaString(v)), Err(lua) => lua }; /*let _lua = match LuaRead::lua_read_at_position(&lua, index) { Ok(v) => return Ok(AnyLuaValue::LuaArray(v)), Err(lua) => lua };*/ Ok(AnyLuaValue::LuaOther) } }<|fim▁end|>
use PushGuard;
<|file_name|>EventEmitter.test.js<|end_file_name|><|fim▁begin|>var expect = require('chai').expect, sinon = require('sinon'), EventEmitter = require('../src/EventEmitter'); describe('EventEmitter tests', function() { var emitter, foo, bar; beforeEach(function() { emitter = new EventEmitter(); foo = sinon.spy(); bar = sinon.spy(); }); describe('.on', function() { it('should throw error if foo is not a function', function() { var fn = emitter.on.bind(null, 'abc', 'abc'); expect(fn).to.throw(TypeError); }); it('should register event with emitter._events', function() { emitter.on('data', foo); expect(emitter._events.data[0]).to.equal(foo); }); it('should be able to register multiple foos', function() { emitter.on('data', foo); emitter.on('data', bar); expect(emitter._events.data[0]).to.equal(foo); expect(emitter._events.data[1]).to.equal(bar); }); it('should return itself', function() { expect(emitter.on('data', foo)).to.equal(emitter); }); it('emits newListener event with event name and listener args', function() { var emitSpy = sinon.spy(emitter, 'emit'); emitter.on('foo', foo); sinon.assert.calledOnce(emitSpy); sinon.assert.calledWith(emitSpy, 'newListener', 'foo', foo); }); }); describe('.emit', function() { beforeEach(function() { emitter.on('data', foo); emitter.on('data', bar); }); it('should trigger listeners bound to event', function() { emitter.emit('data'); expect(foo.calledOnce).to.be.true; expect(bar.calledOnce).to.be.true; }); it('should trigger listeners in order', function() { emitter.emit('data'); expect(foo.calledBefore(bar)).to.be.true; }); it('should apply arguments to each listener', function() { var arg1 = 1, arg2 = '2', arg3 = {}; emitter.emit('data', arg1, arg2, arg3); sinon.assert.calledWithExactly(foo, arg1, arg2, arg3); }); it('should bind "this" to the emitter in listener', function(done) { var fn = function() { expect(this).to.equal(emitter); done(); }; emitter.on('data', fn); emitter.emit('data'); }); it('should return true if listeners 
were fired', function() { expect(emitter.emit('data')).to.be.true; }); it('should return false if no listeners fired', function() { expect(emitter.emit('adf')).to.be.false; }); }); <|fim▁hole|> beforeEach(function() { emitter.on('foo', foo); emitter.on('foo', function() {}); emitter.on('bar', bar); }); it('should remove all listeners if no parameter', function() { emitter.removeAllListeners(); expect(emitter._events).to.be.empty; }); it('should only remove listeners to specified event', function() { emitter.removeAllListeners('foo'); expect(emitter._events.foo).to.be.undefined; expect(emitter._events.bar).to.not.be.undefined; }); it('should return the emitter', function() { expect(emitter.removeAllListeners()).to.equal(emitter); }); }); describe('.removeListener', function() { var baz; beforeEach(function() { baz = sinon.spy(); emitter.on('foo', foo); emitter.on('foo', baz); emitter.on('bar', bar); }); it('should remove only one listener for event', function() { emitter.removeListener('foo', baz); expect(emitter._events.foo.length).to.equal(1); expect(emitter._events.foo[0]).to.equal(foo); }); it('should throw error if listener is not a function', function() { var fn = emitter.removeListener.bind(emitter, 'foo', 'foo'); expect(fn).to.throw(TypeError); }); it('should return the emitter', function() { expect(emitter.removeListener('foo', foo)).to.equal(emitter); }); it('should be able to remove listener added by .once', function() { var qux = sinon.spy(); emitter.once('bar', qux); emitter.removeListener('bar', qux); expect(emitter._events.bar.length).to.equal(1); expect(emitter._events.bar[0]).to.equal(bar); }); it('should emit removeListener event with event name and listener args', function() { var emitSpy = sinon.spy(emitter, 'emit'); emitter.removeListener('foo', foo); sinon.assert.calledOnce(emitSpy); sinon.assert.calledWith(emitSpy, 'removeListener', 'foo', foo); }); }); describe('.once', function() { it('should throw error if listener is not a function', 
function() { var fn = emitter.once.bind(null, 'abc', 'abc'); expect(fn).to.throw(TypeError); }); it('should register a listener', function() { emitter.once('foo', foo); expect(emitter._events.foo.length).to.equal(1); }); it('should run registered function', function() { emitter.once('foo', foo); emitter.emit('foo'); expect(foo.calledOnce).to.be.true; }); it('should remove listener after .emit', function() { emitter.once('foo', foo); emitter.emit('foo'); expect(emitter._events.foo).to.be.empty; }); it('should pass all parameters from listener', function() { var arg1 = 1, arg2 = '2', arg3 = {}; emitter.once('foo', foo); emitter.emit('foo', arg1, arg2, arg3); sinon.assert.calledWithExactly(foo, arg1, arg2, arg3); }); it('should return the emitter', function() { expect(emitter.once('foo', foo)).to.equal(emitter); }); it('emits newListener event with event name and listener args', function() { var emitSpy = sinon.spy(emitter, 'emit'); emitter.once('foo', foo); sinon.assert.calledOnce(emitSpy); sinon.assert.calledWith(emitSpy, 'newListener', 'foo', foo); }); }); describe('.listeners', function() { beforeEach(function() { emitter.on('foo', foo); emitter.on('bar', bar); }); it('should return an array of listeners for an event', function() { expect(emitter.listeners('foo')).to.deep.equal([foo]); }); it('should return an empty array for unregistered events', function() { expect(emitter.listeners('abcd')).to.deep.equal([]); }); }); describe('.addListener', function() { it('should be alias to .on', function() { expect(emitter.addListener).to.equal(emitter.on); }); }); describe('.off', function() { it('should alias to .removeListener', function() { expect(emitter.off).to.equal(emitter.removeListener); }); }); describe('EventEmitter.listenerCount', function() { beforeEach(function() { emitter.on('foo', foo); emitter.on('foo', function() {}); emitter.on('bar', bar); }); it('should return 0 for non emitters', function() { expect(EventEmitter.listenerCount(1)).to.equal(0); }); 
it('should return 0 for no listeners', function() { expect(EventEmitter.listenerCount(emitter, 'baz')).to.equal(0); }); it('should return number of listeners', function() { expect(EventEmitter.listenerCount(emitter, 'foo')).to.equal(2); }); }); });<|fim▁end|>
describe('.removeAllListeners', function() {
<|file_name|>terminal-backend-module.ts<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2017 TypeFox and others. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 */ import { ContainerModule, Container } from 'inversify'; import { BackendApplicationContribution } from '@theia/core/lib/node'; import { TerminalBackendContribution } from "./terminal-backend-contribution"; import { ConnectionHandler, JsonRpcConnectionHandler } from "@theia/core/lib/common/messaging"; import { ShellProcess, ShellProcessFactory, ShellProcessOptions } from './shell-process'; import { ITerminalServer, terminalPath } from '../common/terminal-protocol'; import { IBaseTerminalClient } from '../common/base-terminal-protocol'; import { TerminalServer } from './terminal-server'; import { ILogger } from '@theia/core/lib/common/logger'; import { IShellTerminalServer, shellTerminalPath } from '../common/shell-terminal-protocol'; import { ShellTerminalServer } from '../node/shell-terminal-server'; export default new ContainerModule(bind => { bind(BackendApplicationContribution).to(TerminalBackendContribution); bind(ITerminalServer).to(TerminalServer).inSingletonScope(); bind(IShellTerminalServer).to(ShellTerminalServer).inSingletonScope(); bind(ShellProcess).toSelf().inTransientScope(); bind(ShellProcessFactory).toFactory(ctx => (options: ShellProcessOptions) => { const child = new Container({ defaultScope: 'Singleton' }); child.parent = ctx.container;<|fim▁hole|> child.bind(ShellProcessOptions).toConstantValue({}); child.bind(ILogger).toConstantValue(loggerChild); return child.get(ShellProcess); } ); bind(ConnectionHandler).toDynamicValue(ctx => new JsonRpcConnectionHandler<IBaseTerminalClient>(terminalPath, client => { const terminalServer = ctx.container.get<ITerminalServer>(ITerminalServer); terminalServer.setClient(client); 
return terminalServer; }) ).inSingletonScope(); bind(ConnectionHandler).toDynamicValue(ctx => new JsonRpcConnectionHandler<IBaseTerminalClient>(shellTerminalPath, client => { const shellTerminalServer = ctx.container.get<ITerminalServer>(IShellTerminalServer); shellTerminalServer.setClient(client); return shellTerminalServer; }) ).inSingletonScope(); });<|fim▁end|>
const logger = ctx.container.get<ILogger>(ILogger); const loggerChild = logger.child({ 'module': 'terminal-backend' });
<|file_name|>matchMakingThread.py<|end_file_name|><|fim▁begin|>#This will be the thread responsible for the matchmaking which operates as follows: #There are four lists where the players are divided into based on their rank. #List 1 is for ranks 0,1,2. #List 2 is for ranks 3,4,5. #List 3 is for ranks 6,7,8. #List 4 is for ranks 9,10. #When a player waits for a match too long, this thread will start looking for #players in adjacent lists, first in the higher category list and then in the #lower one. #Each player has a dictionary associated with him, which will store his info #and some other parameters, like his network info to connect to him. #This thread support only 2 operations: # 1) Add to match making lists # 2) Terminate itself MAX_LOOPS = 10 MAX_WAIT = 10 import Queue,time,random #inputQueue is for getting players from account threads #outputQueue is for sending match tokens to the thread that handles the matches<|fim▁hole|>def mmThread(inputQueue,exitQueue,outputQueue): #Lists for all difficulties noviceList = [] apprenticeList = [] adeptList = [] expertList = [] #put them in one list playerList = [noviceList,apprenticeList,adeptList,expertList] #This list contains the players that have waited for too long in their Queue needRematch = [] while True: loopCounter = 0 #Check for exit signal try: exit = exitQueue.get(False) if exit: break except: pass #loop over new entries at most MAX_LOOPS times then do it again while loopCounter < MAX_LOOPS: try: #Get new player and add him to a list according to his rank newPlayer = inputQueue.get(False) playerRank = newPlayer.get('rank') listIndex = playerRank // 3 newPlayer['entryTime'] = time.time() playerList[listIndex].append(newPlayer) print 'MMTHREAD : Got user ' print 'MMTHREAD: USER RANK IS %d ' % playerRank except Queue.Empty: break loopCounter += 1 #First check for players in the rematch Queue for player in needRematch[:]: position = player.get('rank') // 3 foundMatch = False #Check for empty list if 
len(playerList[position]) == 0 or playerList[position][0] != player: continue #Check for enemy player one list above this player if position + 1 < len(playerList) and len(playerList[position+1]) >= 1: foundMatch = True firstPlayer = playerList[position].pop(0) secondPlayer = playerList[position+1].pop(0) needRematch.remove(player) elif (position - 1 >= 0) and len(playerList[position-1]) >= 1: #Else check for enemy player one list below this player foundMatch = True firstPlayer = playerList[position].pop(0) secondPlayer = playerList[position-1].pop(0) needRematch.remove(player) #Add player tokens to Queue for game play thread if foundMatch: bothPlayers = [firstPlayer,secondPlayer] data = {'turn':0,'players':bothPlayers} print'Add new Player token' outputQueue.put(data) #Match players in same list for category in playerList: while True: try: #Try to pop two players from the list #If successfull, put their token into game play thread Queue firstPlayer = None secondPlayer = None firstPlayer = category.pop(0) secondPlayer = category.pop(0) bothPlayers = [firstPlayer,secondPlayer] turn = random.randint(0,1) data = {'turn':turn,'players':bothPlayers} print'Add new Player token' outputQueue.put(data) except: #Else if only one player is found , but him back if secondPlayer == None and firstPlayer != None: category.insert(0,firstPlayer) break #Check for players that didnt find a match for a long time and alert thread for i in range(0,3): if len(playerList[i]) > 0: if time.time() - playerList[i][0].get('entryTime') >= MAX_WAIT: needRematch.append(playerList[i][0]) print 'match making thread out'<|fim▁end|>
#exitQueue is used for exiting the thread
<|file_name|>lights_controller.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import sys import remote_core as core import radio_lights def main(argv): config = core.load_config() lights_config_names = {"1":"door_light", "2":"desk_light", "3": "shelf_light"}<|fim▁hole|> if argv[0] == "an": argv = ["1n", "2n", "3n"] elif argv[0] == "af": argv = ["1f", "2f", "3f"] for item in argv: if item[-1:] == 'n': radio_lights.turn_on_single(config["lights"][lights_config_names[item[:1]]]) elif item[-1:] == 'f': radio_lights.turn_off_single(config["lights"][lights_config_names[item[:1]]]) core.write_config(config) if __name__ == "__main__": main(sys.argv[1:])<|fim▁end|>
if len(argv) == 1 and len(argv[0]) == 2:
<|file_name|>xml.py<|end_file_name|><|fim▁begin|># The contents of this file are subject to the Mozilla Public License # Version 2.0 (the "License"); you may not use this file except in # compliance with the License. You may obtain a copy of the License at # http://www.mozilla.org/MPL/ # # Software distributed under the License is distributed on an "AS IS"basis, # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License # for the specific language governing rights and limitations under the # License. # # OS2Webscanner was developed by Magenta in collaboration with OS2 the # Danish community of open source municipalities (http://www.os2web.dk/). # # The code is currently governed by OS2 the Danish community of open # source municipalities ( http://www.os2web.dk/ ) """HTML Processors.""" from .processor import Processor from .text import TextProcessor import logging import os import xmltodict import json from xml.parsers.expat import ExpatError from .html import HTMLProcessor class XmlProcessor(HTMLProcessor): """Processor for XMLdocuments. When processing, converts document to json one line including all attributes Immediately processes with TextProcessor after processing. """ item_type = "xml" text_processor = TextProcessor() def handle_spider_item(self, data, url_object): """Immediately process the spider item.""" return self.process(data, url_object) def handle_queue_item(self, item): """Immediately process the queue item.""" result = self.process_file(item.file_path, item.url) if os.path.exists(item.file_path): os.remove(item.file_path) return result def process(self, data, url_object): """Process XML data. Converts document to json before processing with TextProcessor. 
if XML is not well formed, treat it as HTML """ logging.info("Process XML %s" % url_object.url) try: data = json.dumps(xmltodict.parse(data)) return self.text_processor.process(data, url_object)<|fim▁hole|> return super(XmlProcessor,self).process(data,url_object) Processor.register_processor(XmlProcessor.item_type, XmlProcessor)<|fim▁end|>
except ExpatError:
<|file_name|>config_test.go<|end_file_name|><|fim▁begin|>package sshd // Tests ensuring that configuration-driven options translate to correct // behaviors. import ( "code.google.com/p/go.crypto/ssh" "testing" ) func TestConfigFromConfig(t *testing.T) { our_config := &Config{ Hostkey: []byte(testServerPrivateKey), BindAddr: "127.0.0.0:0", VcsRoot: "repos", } a := auther(true) var cfg *ssh.ServerConfig cfg = our_config.getSshServerConfig() if cfg.PublicKeyCallback != nil { t.Error("Pubkey callback was bound, but no implementation was provided. sshd will erroneously offer pubkey auth.") } if cfg.PasswordCallback != nil { t.Error("Password callback was bound, but no implementation was provided. sshd will erroneously offer pasword auth.") } our_config.KeyAuthenticator = a cfg = our_config.getSshServerConfig() if cfg.PublicKeyCallback == nil { t.Error("Pubkey callback was not bound, but an implementation was provided. sshd will erroneously not offer pubkey auth.") } if cfg.PasswordCallback != nil { t.Error("Password callback was bound, but no implementation was provided. sshd will erroneously offer pasword auth.") } our_config.PassAuthenticator = a cfg = our_config.getSshServerConfig() if cfg.PublicKeyCallback == nil {<|fim▁hole|> t.Error("Password callback was not bound, but an implementation were provided. sshd will erroneously not offer pasword auth.") } }<|fim▁end|>
t.Error("Pubkey callback was not bound, but an implementation was provided. sshd will erroneously not offer pubkey auth.") } if cfg.PasswordCallback == nil {
<|file_name|>regions-close-object-into-object-3.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(box_syntax)] #![allow(warnings)] use std::marker::PhantomFn; trait A<T> : PhantomFn<(Self,T)> {} struct B<'a, T>(&'a (A<T>+'a)); trait X : PhantomFn<Self> {} impl<'a, T> X for B<'a, T> {} fn h<'a, T, U>(v: Box<A<U>+'static>) -> Box<X+'static> { box B(&*v) as Box<X> //~ ERROR `*v` does not live long enough } fn main() {}<|fim▁end|>
// file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.
<|file_name|>train_sequence_model.py<|end_file_name|><|fim▁begin|>"""Module to train sequence model. Vectorizes training and validation texts into sequences and uses that for training a sequence model - a sepCNN model. We use sequence model for text classification when the ratio of number of samples to number of words per sample for the given dataset is very large (>~15K). """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import time import tensorflow as tf import numpy as np import build_model import load_data import vectorize_data import explore_data FLAGS = None # Limit on the number of features. We use the top 20K features. TOP_K = 20000 def train_sequence_model(data, learning_rate=1e-3, epochs=1000, batch_size=128, blocks=2, filters=64, dropout_rate=0.2, embedding_dim=200, kernel_size=3, pool_size=3): """Trains sequence model on the given dataset. # Arguments data: tuples of training and test texts and labels. learning_rate: float, learning rate for training model. epochs: int, number of epochs. batch_size: int, number of samples per batch. blocks: int, number of pairs of sepCNN and pooling blocks in the model. filters: int, output dimension of sepCNN layers in the model. dropout_rate: float: percentage of input to drop at Dropout layers. embedding_dim: int, dimension of the embedding vectors. kernel_size: int, length of the convolution window. pool_size: int, factor by which to downscale input at MaxPooling layer. # Raises ValueError: If validation data has label values which were not seen in the training data. """ # Get the data. (train_texts, train_labels), (val_texts, val_labels) = data # Verify that validation labels are in the same range as training labels. 
num_classes = explore_data.get_num_classes(train_labels) unexpected_labels = [v for v in val_labels if v not in range(num_classes)] if len(unexpected_labels): raise ValueError('Unexpected label values found in the validation set:' ' {unexpected_labels}. Please make sure that the ' 'labels in the validation set are in the same range ' 'as training labels.'.format( unexpected_labels=unexpected_labels)) # Vectorize texts. x_train, x_val, word_index = vectorize_data.sequence_vectorize( train_texts, val_texts) # Number of features will be the embedding input dimension. Add 1 for the # reserved index 0. num_features = min(len(word_index) + 1, TOP_K) # Create model instance. model = build_model.sepcnn_model(blocks=blocks, filters=filters, kernel_size=kernel_size, embedding_dim=embedding_dim, dropout_rate=dropout_rate, pool_size=pool_size, input_shape=x_train.shape[1:], num_classes=num_classes, num_features=num_features) # Compile model with learning parameters. if num_classes == 2: loss = 'binary_crossentropy' else: loss = 'sparse_categorical_crossentropy' optimizer = tf.keras.optimizers.Adam(lr=learning_rate) model.compile(optimizer=optimizer, loss=loss, metrics=['acc']) # Create callback for early stopping on validation loss. If the loss does # not decrease in two consecutive tries, stop training. callbacks = [tf.keras.callbacks.EarlyStopping( monitor='val_loss', patience=2)] # Train and validate model. history = model.fit( x_train, train_labels, epochs=epochs, callbacks=callbacks, validation_data=(x_val, val_labels), verbose=2, # Logs once per epoch. batch_size=batch_size) # Print results. history = history.history print('Validation accuracy: {acc}, loss: {loss}'.format( acc=history['val_acc'][-1], loss=history['val_loss'][-1])) # Save model. 
model.save('rotten_tomatoes_sepcnn_model.h5') return history['val_acc'][-1], history['val_loss'][-1] if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--data_dir', type=str, default='./data', help='input data directory') FLAGS, unparsed = parser.parse_known_args() # Using the Rotten tomatoes movie reviews dataset to demonstrate # training sequence model. data = load_data.load_rotten_tomatoes_sentiment_analysis_dataset(<|fim▁hole|><|fim▁end|>
FLAGS.data_dir) train_sequence_model(data)
<|file_name|>delete_recorder_id_parameters.go<|end_file_name|><|fim▁begin|>// Code generated by go-swagger; DO NOT EDIT. // Copyright 2017-2022 Authors of Cilium // SPDX-License-Identifier: Apache-2.0 package recorder // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "context" "net/http" "time" "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" "github.com/go-openapi/strfmt" "github.com/go-openapi/swag" ) // NewDeleteRecorderIDParams creates a new DeleteRecorderIDParams object // with the default values initialized. func NewDeleteRecorderIDParams() *DeleteRecorderIDParams { var () return &DeleteRecorderIDParams{ timeout: cr.DefaultTimeout, } } // NewDeleteRecorderIDParamsWithTimeout creates a new DeleteRecorderIDParams object // with the default values initialized, and the ability to set a timeout on a request func NewDeleteRecorderIDParamsWithTimeout(timeout time.Duration) *DeleteRecorderIDParams { var () return &DeleteRecorderIDParams{ timeout: timeout, } } // NewDeleteRecorderIDParamsWithContext creates a new DeleteRecorderIDParams object // with the default values initialized, and the ability to set a context for a request func NewDeleteRecorderIDParamsWithContext(ctx context.Context) *DeleteRecorderIDParams { var () return &DeleteRecorderIDParams{ Context: ctx, } } // NewDeleteRecorderIDParamsWithHTTPClient creates a new DeleteRecorderIDParams object // with the default values initialized, and the ability to set a custom HTTPClient for a request func NewDeleteRecorderIDParamsWithHTTPClient(client *http.Client) *DeleteRecorderIDParams { var () return &DeleteRecorderIDParams{ HTTPClient: client, } } /*DeleteRecorderIDParams contains all the parameters to send to the API endpoint for the delete recorder ID operation typically these are written to a http.Request */ type DeleteRecorderIDParams struct { /*ID ID of recorder 
*/ ID int64 timeout time.Duration Context context.Context HTTPClient *http.Client } // WithTimeout adds the timeout to the delete recorder ID params func (o *DeleteRecorderIDParams) WithTimeout(timeout time.Duration) *DeleteRecorderIDParams { o.SetTimeout(timeout) return o } // SetTimeout adds the timeout to the delete recorder ID params func (o *DeleteRecorderIDParams) SetTimeout(timeout time.Duration) { o.timeout = timeout } // WithContext adds the context to the delete recorder ID params func (o *DeleteRecorderIDParams) WithContext(ctx context.Context) *DeleteRecorderIDParams { o.SetContext(ctx) return o } // SetContext adds the context to the delete recorder ID params func (o *DeleteRecorderIDParams) SetContext(ctx context.Context) { o.Context = ctx } // WithHTTPClient adds the HTTPClient to the delete recorder ID params func (o *DeleteRecorderIDParams) WithHTTPClient(client *http.Client) *DeleteRecorderIDParams { o.SetHTTPClient(client) return o } // SetHTTPClient adds the HTTPClient to the delete recorder ID params func (o *DeleteRecorderIDParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } // WithID adds the id to the delete recorder ID params func (o *DeleteRecorderIDParams) WithID(id int64) *DeleteRecorderIDParams { o.SetID(id) return o } // SetID adds the id to the delete recorder ID params func (o *DeleteRecorderIDParams) SetID(id int64) {<|fim▁hole|> // WriteToRequest writes these params to a swagger request func (o *DeleteRecorderIDParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { if err := r.SetTimeout(o.timeout); err != nil { return err } var res []error // path param id if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil { return err } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil }<|fim▁end|>
o.ID = id }
<|file_name|>webXRMotionControllerManager.ts<|end_file_name|><|fim▁begin|>import { WebXRAbstractMotionController, IMotionControllerProfile, } from './webXRAbstractMotionController'; import { WebXRGenericTriggerMotionController } from './webXRGenericMotionController'; import { Scene } from '../../scene'; import { Tools } from '../../Misc/tools'; import { WebXRProfiledMotionController } from './webXRProfiledMotionController'; /** * A construction function type to create a new controller based on an xrInput object */ export type MotionControllerConstructor = (xrInput: XRInputSource, scene: Scene) => WebXRAbstractMotionController; /** * The MotionController Manager manages all registered motion controllers and loads the right one when needed. * * When this repository is complete: https://github.com/immersive-web/webxr-input-profiles/tree/master/packages/assets * it should be replaced with auto-loaded controllers. * * When using a model try to stay as generic as possible. Eventually there will be no need in any of the controller classes */ export class WebXRMotionControllerManager { private static _AvailableControllers: { [type: string]: MotionControllerConstructor } = {}; private static _Fallbacks: { [profileId: string]: string[] } = {}; // cache for loading private static _ProfileLoadingPromises: { [profileName: string]: Promise<IMotionControllerProfile> } = {}; private static _ProfilesList: Promise<{ [profile: string]: string }>; /** * The base URL of the online controller repository. Can be changed at any time. 
*/ public static BaseRepositoryUrl = "https://immersive-web.github.io/webxr-input-profiles/packages/viewer/dist"; /** * Which repository gets priority - local or online */ public static PrioritizeOnlineRepository: boolean = true; /** * Use the online repository, or use only locally-defined controllers */ public static UseOnlineRepository: boolean = true; /** * Clear the cache used for profile loading and reload when requested again */ public static ClearProfilesCache() { delete this._ProfilesList; this._ProfileLoadingPromises = {}; } /** * Register the default fallbacks. * This function is called automatically when this file is imported. */ public static DefaultFallbacks() { this.RegisterFallbacksForProfileId("google-daydream", ["generic-touchpad"]); this.RegisterFallbacksForProfileId("htc-vive-focus", ["generic-trigger-touchpad"]); this.RegisterFallbacksForProfileId("htc-vive", ["generic-trigger-squeeze-touchpad"]); this.RegisterFallbacksForProfileId("magicleap-one", ["generic-trigger-squeeze-touchpad"]); this.RegisterFallbacksForProfileId("windows-mixed-reality", ["generic-trigger-squeeze-touchpad-thumbstick"]); this.RegisterFallbacksForProfileId("microsoft-mixed-reality", ["windows-mixed-reality", "generic-trigger-squeeze-touchpad-thumbstick"]); this.RegisterFallbacksForProfileId("oculus-go", ["generic-trigger-touchpad"]); this.RegisterFallbacksForProfileId("oculus-touch-v2", ["oculus-touch", "generic-trigger-squeeze-thumbstick"]); this.RegisterFallbacksForProfileId("oculus-touch", ["generic-trigger-squeeze-thumbstick"]); this.RegisterFallbacksForProfileId("samsung-gearvr", ["windows-mixed-reality", "generic-trigger-squeeze-touchpad-thumbstick"]); this.RegisterFallbacksForProfileId("samsung-odyssey", ["generic-touchpad"]); this.RegisterFallbacksForProfileId("valve-index", ["generic-trigger-squeeze-touchpad-thumbstick"]); } /** * Find a fallback profile if the profile was not found. There are a few predefined generic profiles. 
* @param profileId the profile to which a fallback needs to be found * @return an array with corresponding fallback profiles */ public static FindFallbackWithProfileId(profileId: string): string[] { const returnArray = this._Fallbacks[profileId] || []; returnArray.unshift(profileId); return returnArray; } /** * When acquiring a new xrInput object (usually by the WebXRInput class), match it with the correct profile. * The order of search: * * 1) Iterate the profiles array of the xr input and try finding a corresponding motion controller * 2) (If not found) search in the gamepad id and try using it (legacy versions only) * 3) search for registered fallbacks (should be redundant, nonetheless it makes sense to check) * 4) return the generic trigger controller if none were found * * @param xrInput the xrInput to which a new controller is initialized * @param scene the scene to which the model will be added * @param forceProfile force a certain profile for this controller * @return A promise that fulfils with the motion controller class for this profile id or the generic standard class if none was found */ public static GetMotionControllerWithXRInput(xrInput: XRInputSource, scene: Scene, forceProfile?: string): Promise<WebXRAbstractMotionController> { const profileArray: string[] = []; if (forceProfile) { profileArray.push(forceProfile); } profileArray.push(...(xrInput.profiles || [])); // emulator support if (profileArray.length && !profileArray[0]) { // remove the first "undefined" that the emulator is adding profileArray.pop(); } // legacy support - try using the gamepad id if (xrInput.gamepad && xrInput.gamepad.id) { switch (xrInput.gamepad.id) { case (xrInput.gamepad.id.match(/oculus touch/gi) ? 
xrInput.gamepad.id : undefined): // oculus in gamepad id profileArray.push("oculus-touch-v2"); break; <|fim▁hole|> const windowsMRIdx = profileArray.indexOf("windows-mixed-reality"); if (windowsMRIdx !== -1) { profileArray.splice(windowsMRIdx, 0, "microsoft-mixed-reality"); } if (!profileArray.length) { profileArray.push("generic-trigger"); } if (this.UseOnlineRepository) { const firstFunction = this.PrioritizeOnlineRepository ? this._LoadProfileFromRepository : this._LoadProfilesFromAvailableControllers; const secondFunction = this.PrioritizeOnlineRepository ? this._LoadProfilesFromAvailableControllers : this._LoadProfileFromRepository; return firstFunction.call(this, profileArray, xrInput, scene).catch(() => { return secondFunction.call(this, profileArray, xrInput, scene); }); } else { // use only available functions return this._LoadProfilesFromAvailableControllers(profileArray, xrInput, scene); } } /** * Register a new controller based on its profile. This function will be called by the controller classes themselves. * * If you are missing a profile, make sure it is imported in your source, otherwise it will not register. * * @param type the profile type to register * @param constructFunction the function to be called when loading this profile */ public static RegisterController(type: string, constructFunction: MotionControllerConstructor) { this._AvailableControllers[type] = constructFunction; } /** * Register a fallback to a specific profile. 
* @param profileId the profileId that will receive the fallbacks * @param fallbacks A list of fallback profiles */ public static RegisterFallbacksForProfileId(profileId: string, fallbacks: string[]): void { if (this._Fallbacks[profileId]) { this._Fallbacks[profileId].push(...fallbacks); } else { this._Fallbacks[profileId] = fallbacks; } } /** * Will update the list of profiles available in the repository * @return a promise that resolves to a map of profiles available online */ public static UpdateProfilesList() { this._ProfilesList = Tools.LoadFileAsync(this.BaseRepositoryUrl + '/profiles/profilesList.json', false).then((data) => { return JSON.parse(data.toString()); }); return this._ProfilesList; } private static _LoadProfileFromRepository(profileArray: string[], xrInput: XRInputSource, scene: Scene): Promise<WebXRAbstractMotionController> { return Promise.resolve().then(() => { if (!this._ProfilesList) { return this.UpdateProfilesList(); } else { return this._ProfilesList; } }).then((profilesList: { [profile: string]: string }) => { // load the right profile for (let i = 0; i < profileArray.length; ++i) { // defensive if (!profileArray[i]) { continue; } if (profilesList[profileArray[i]]) { return profileArray[i]; } } throw new Error(`neither controller ${profileArray[0]} nor all fallbacks were found in the repository,`); }).then((profileToLoad: string) => { // load the profile if (!this._ProfileLoadingPromises[profileToLoad]) { this._ProfileLoadingPromises[profileToLoad] = Tools.LoadFileAsync(`${this.BaseRepositoryUrl}/profiles/${profileToLoad}/profile.json`, false).then((data) => <IMotionControllerProfile>JSON.parse(data as string)); } return this._ProfileLoadingPromises[profileToLoad]; }).then((profile: IMotionControllerProfile) => { return new WebXRProfiledMotionController(scene, xrInput, profile, this.BaseRepositoryUrl); }); } private static _LoadProfilesFromAvailableControllers(profileArray: string[], xrInput: XRInputSource, scene: Scene) { // check 
fallbacks for (let i = 0; i < profileArray.length; ++i) { // defensive if (!profileArray[i]) { continue; } const fallbacks = this.FindFallbackWithProfileId(profileArray[i]); for (let j = 0; j < fallbacks.length; ++j) { const constructionFunction = this._AvailableControllers[fallbacks[j]]; if (constructionFunction) { return Promise.resolve(constructionFunction(xrInput, scene)); } } } throw new Error(`no controller requested was found in the available controllers list`); } } // register the generic profile(s) here so we will at least have them WebXRMotionControllerManager.RegisterController(WebXRGenericTriggerMotionController.ProfileId, (xrInput: XRInputSource, scene: Scene) => { return new WebXRGenericTriggerMotionController(scene, <any>(xrInput.gamepad), xrInput.handedness); }); // register fallbacks WebXRMotionControllerManager.DefaultFallbacks();<|fim▁end|>
} } // make sure microsoft/windows mixed reality works correctly
<|file_name|>test_cache_invocation.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import unittest import apt_pkg import apt.progress.base class TestCache(unittest.TestCase): """Test invocation of apt_pkg.Cache()""" def setUp(self): apt_pkg.init_config() apt_pkg.init_system() <|fim▁hole|> self.assertRaises(ValueError, apt_pkg.Cache, apt_cache) self.assertRaises(ValueError, apt_pkg.Cache, apt.progress.base.AcquireProgress()) self.assertRaises(ValueError, apt_pkg.Cache, 0) def test_proper_invocation(self): """cache_invocation: Test correct invocation.""" apt_cache = apt_pkg.Cache(progress=None) apt_depcache = apt_pkg.DepCache(apt_cache) if __name__ == "__main__": unittest.main()<|fim▁end|>
def test_wrong_invocation(self): """cache_invocation: Test wrong invocation.""" apt_cache = apt_pkg.Cache(progress=None)
<|file_name|>NoSuchProcessStepDossierPartException.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2000-2013 Liferay, Inc. All rights reserved. * * This library is free software; you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the Free * Software Foundation; either version 2.1 of the License, or (at your option) * any later version. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more * details. */ package org.opencps.processmgt; import com.liferay.portal.NoSuchModelException; /** * @author khoavd */ public class NoSuchProcessStepDossierPartException extends NoSuchModelException { public NoSuchProcessStepDossierPartException() { super(); } public NoSuchProcessStepDossierPartException(String msg) { super(msg); } public NoSuchProcessStepDossierPartException(String msg, Throwable cause) {<|fim▁hole|> public NoSuchProcessStepDossierPartException(Throwable cause) { super(cause); } }<|fim▁end|>
super(msg, cause); }
<|file_name|>try_telethon.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import traceback from telethon_examples.interactive_telegram_client \<|fim▁hole|> import InteractiveTelegramClient def load_settings(path='api/settings'): """Loads the user settings located under `api/`""" result = {} with open(path, 'r', encoding='utf-8') as file: for line in file: value_pair = line.split('=') left = value_pair[0].strip() right = value_pair[1].strip() if right.isnumeric(): result[left] = int(right) else: result[left] = right return result if __name__ == '__main__': # Load the settings and initialize the client settings = load_settings() kwargs = {} if settings.get('socks_proxy'): import socks # $ pip install pysocks host, port = settings['socks_proxy'].split(':') kwargs = dict(proxy=(socks.SOCKS5, host, int(port))) client = InteractiveTelegramClient( session_user_id=str(settings.get('session_name', 'anonymous')), user_phone=str(settings['user_phone']), api_id=settings['api_id'], api_hash=str(settings['api_hash']), **kwargs) print('Initialization done!') try: client.run() except Exception as e: print('Unexpected error ({}): {} at\n{}'.format( type(e), e, traceback.format_exc())) finally: client.disconnect() print('Thanks for trying the interactive example! Exiting...')<|fim▁end|>
<|file_name|>nonlinear_solver.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ This is the module where the main solver object for the nonlinear solver of bolt is defined. This solver object stores the details of the system defined under physical_system, and is evolved using the methods of this module. The solver has the option of using 2 different methods: - A semi-lagrangian scheme based on Cheng-Knorr(1978) which uses advective interpolation.(non-conservative) - The interpolation schemes available are linear and cubic spline. - Finite volume scheme(conservative): - Riemann solvers available are the local Lax-Friedrichs and 1st order upwind scheme. - The reconstruction schemes available are minmod, PPM, and WENO5 """ # Importing dependencies: import arrayfire as af import numpy as np import petsc4py, sys petsc4py.init(sys.argv) from petsc4py import PETSc import socket # Importing solver libraries: from . import communicate from . import boundaries from . import timestep from .file_io import dump from .file_io import load from .utils.bandwidth_test import bandwidth_test from .utils.print_with_indent import indent from .utils.performance_timings import print_table from .utils.broadcasted_primitive_operations import multiply from .compute_moments import compute_moments as compute_moments_imported from .fields.fields import fields_solver class nonlinear_solver(object): """ An instance of this class' attributes contains methods which are used in evolving the system declared under physical system nonlinearly. The state of the system then may be determined from the attributes of the system such as the distribution function and electromagnetic fields. Relevant physical information is obtained by coarse graining this system by taking moments of the distribution function. This is performed by the compute_moments() method. 
""" def __init__(self, physical_system, performance_test_flag = False): """ Constructor for the nonlinear_solver object. It takes the physical system object as an argument and uses it in intialization and evolution of the system in consideration. Additionally, a performance test flag is also passed which when true, stores time which is consumed by each of the major solver routines. This proves particularly useful in analyzing performance bottlenecks and obtaining benchmarks. Parameters: ----------- physical_system: The defined physical system object which holds all the simulation information such as the initial conditions, and the domain info is passed as an argument in defining an instance of the nonlinear_solver. This system is then evolved, and monitored using the various methods under the nonlinear_solver class. """ self.physical_system = physical_system # Holding Domain Info: self.q1_start, self.q1_end = physical_system.q1_start,\ physical_system.q1_end self.q2_start, self.q2_end = physical_system.q2_start,\ physical_system.q2_end self.p1_start, self.p1_end = physical_system.p1_start,\ physical_system.p1_end self.p2_start, self.p2_end = physical_system.p2_start,\ physical_system.p2_end self.p3_start, self.p3_end = physical_system.p3_start,\ physical_system.p3_end # Holding Domain Resolution: self.N_q1, self.dq1 = physical_system.N_q1, physical_system.dq1 self.N_q2, self.dq2 = physical_system.N_q2, physical_system.dq2 self.N_p1, self.dp1 = physical_system.N_p1, physical_system.dp1 self.N_p2, self.dp2 = physical_system.N_p2, physical_system.dp2 self.N_p3, self.dp3 = physical_system.N_p3, physical_system.dp3 # Getting number of ghost zones, and the boundary # conditions that are utilized: N_g_q = self.N_ghost_q = physical_system.N_ghost_q N_g_p = self.N_ghost_p = physical_system.N_ghost_p self.boundary_conditions = physical_system.boundary_conditions # Declaring the communicator: self._comm = PETSc.COMM_WORLD.tompi4py() 
if(self.physical_system.params.num_devices>1): af.set_device(self._comm.rank%self.physical_system.params.num_devices) # Getting number of species: self.N_species = len(physical_system.params.mass) # Having the mass and charge along axis 1: self.physical_system.params.mass = \ af.cast(af.moddims(af.to_array(physical_system.params.mass), 1, self.N_species ), af.Dtype.f64 ) self.physical_system.params.charge = \ af.cast(af.moddims(af.to_array(physical_system.params.charge), 1, self.N_species ), af.Dtype.f64 ) PETSc.Sys.Print('\nBackend Details for Nonlinear Solver:') # Printing the backend details for each rank/device/node: PETSc.Sys.syncPrint(indent('Rank ' + str(self._comm.rank) + ' of ' + str(self._comm.size-1))) PETSc.Sys.syncPrint(indent('On Node: '+ socket.gethostname())) PETSc.Sys.syncPrint(indent('Device Details:')) PETSc.Sys.syncPrint(indent(af.info_str(), 2)) PETSc.Sys.syncPrint(indent('Device Bandwidth = ' + str(bandwidth_test(100)) + ' GB / sec')) PETSc.Sys.syncPrint() PETSc.Sys.syncFlush() self.performance_test_flag = performance_test_flag # Initializing variables which are used to time the components of the solver: if(performance_test_flag == True): self.time_ts = 0 self.time_interp2 = 0 self.time_sourcets = 0 self.time_fvm_solver = 0 self.time_reconstruct = 0 self.time_riemann = 0 self.time_fieldstep = 0 self.time_interp3 = 0 self.time_apply_bcs_f = 0 self.time_communicate_f = 0 petsc_bc_in_q1 = 'ghosted' petsc_bc_in_q2 = 'ghosted' # Only for periodic boundary conditions or shearing-box boundary conditions # do the boundary conditions passed to the DA need to be changed. PETSc # automatically handles the application of periodic boundary conditions when # running in parallel. For shearing box boundary conditions, an interpolation # operation needs to be applied on top of the periodic boundary conditions. # In all other cases, ghosted boundaries are used. 
if( self.boundary_conditions.in_q1_left == 'periodic' or self.boundary_conditions.in_q1_left == 'shearing-box' ): petsc_bc_in_q1 = 'periodic' if( self.boundary_conditions.in_q2_bottom == 'periodic' or self.boundary_conditions.in_q2_bottom == 'shearing-box' ): petsc_bc_in_q2 = 'periodic' if(self.boundary_conditions.in_q1_left == 'periodic'): try: assert(self.boundary_conditions.in_q1_right == 'periodic') except: raise Exception('Periodic boundary conditions need to be applied to \ both the boundaries of a particular axis' ) if(self.boundary_conditions.in_q1_left == 'shearing-box'): try: assert(self.boundary_conditions.in_q1_right == 'shearing-box') except: raise Exception('Shearing box boundary conditions need to be applied to \ both the boundaries of a particular axis' ) if(self.boundary_conditions.in_q2_bottom == 'periodic'): try: assert(self.boundary_conditions.in_q2_top == 'periodic') except: raise Exception('Periodic boundary conditions need to be applied to \ both the boundaries of a particular axis' ) if(self.boundary_conditions.in_q2_bottom == 'shearing-box'): try: assert(self.boundary_conditions.in_q2_top == 'shearing-box') except: raise Exception('Shearing box boundary conditions need to be applied to \ both the boundaries of a particular axis' ) nproc_in_q1 = PETSc.DECIDE nproc_in_q2 = PETSc.DECIDE # Since shearing boundary conditions require interpolations which are non-local: if(self.boundary_conditions.in_q2_bottom == 'shearing-box'): nproc_in_q1 = 1 if(self.boundary_conditions.in_q1_left == 'shearing-box'): nproc_in_q2 = 1 # DMDA is a data structure to handle a distributed structure # grid and its related core algorithms. It stores metadata of # how the grid is partitioned when run in parallel which is # utilized by the various methods of the solver. 
self._da_f = PETSc.DMDA().create([self.N_q1, self.N_q2], dof = ( self.N_species * (self.N_p1 + 2 * N_g_p) * (self.N_p2 + 2 * N_g_p) * (self.N_p3 + 2 * N_g_p) ), stencil_width = N_g_q, boundary_type = (petsc_bc_in_q1, petsc_bc_in_q2 ), proc_sizes = (nproc_in_q1, nproc_in_q2 ), stencil_type = 1, comm = self._comm ) # This DA is used by the FileIO routine dump_distribution_function(): self._da_dump_f = PETSc.DMDA().create([self.N_q1, self.N_q2], dof = ( self.N_species * self.N_p1 * self.N_p2 * self.N_p3 ), stencil_width = N_g_q, boundary_type = (petsc_bc_in_q1, petsc_bc_in_q2 ), proc_sizes = (nproc_in_q1, nproc_in_q2 ), stencil_type = 1, comm = self._comm ) # This DA is used by the FileIO routine dump_moments(): # Finding the number of definitions for the moments: attributes = [a for a in dir(self.physical_system.moments) if not a.startswith('_')] # Removing utility functions: if('integral_over_v' in attributes): attributes.remove('integral_over_v') self._da_dump_moments = PETSc.DMDA().create([self.N_q1, self.N_q2], dof = self.N_species * len(attributes), proc_sizes = (nproc_in_q1, nproc_in_q2 ), comm = self._comm ) # Creation of the local and global vectors from the DA: # This is for the distribution function self._glob_f = self._da_f.createGlobalVec() self._local_f = self._da_f.createLocalVec() # The following vector is used to dump the data to file: self._glob_dump_f = self._da_dump_f.createGlobalVec() self._glob_moments = self._da_dump_moments.createGlobalVec() # Getting the arrays for the above vectors: self._glob_f_array = self._glob_f.getArray() self._local_f_array = self._local_f.getArray() self._glob_moments_array = self._glob_moments.getArray() self._glob_dump_f_array = self._glob_dump_f.getArray() # Setting names for the objects which will then be # used as the key identifiers for the HDF5 files: PETSc.Object.setName(self._glob_dump_f, 'distribution_function') PETSc.Object.setName(self._glob_moments, 'moments') # Obtaining the array values of the cannonical 
variables: self.q1_center, self.q2_center = self._calculate_q_center() self.p1_center, self.p2_center, self.p3_center = self._calculate_p_center() # Initialize according to initial condition provided by user: self._initialize(physical_system.params) # Obtaining start coordinates for the local zone # Additionally, we also obtain the size of the local zone ((i_q1_start, i_q2_start), (N_q1_local, N_q2_local)) = self._da_f.getCorners() (i_q1_end, i_q2_end) = (i_q1_start + N_q1_local - 1, i_q2_start + N_q2_local - 1) # Applying dirichlet boundary conditions: if(self.physical_system.boundary_conditions.in_q1_left == 'dirichlet'): # If local zone includes the left physical boundary: if(i_q1_start == 0): self.f[:, :N_g_q] = self.boundary_conditions.\ f_left(self.f, self.q1_center, self.q2_center, self.p1_center, self.p2_center, self.p3_center, self.physical_system.params )[:, :N_g_q] if(self.physical_system.boundary_conditions.in_q1_right == 'dirichlet'): # If local zone includes the right physical boundary: if(i_q1_end == self.N_q1 - 1): self.f[:, -N_g_q:] = self.boundary_conditions.\ f_right(self.f, self.q1_center, self.q2_center, self.p1_center, self.p2_center, self.p3_center, self.physical_system.params )[:, -N_g_q:] if(self.physical_system.boundary_conditions.in_q2_bottom == 'dirichlet'): # If local zone includes the bottom physical boundary: if(i_q2_start == 0): self.f[:, :, :N_g_q] = self.boundary_conditions.\ f_bot(self.f, self.q1_center, self.q2_center, self.p1_center, self.p2_center, self.p3_center, self.physical_system.params )[:, :, :N_g_q] if(self.physical_system.boundary_conditions.in_q2_top == 'dirichlet'): # If local zone includes the top physical boundary: if(i_q2_end == self.N_q2 - 1): self.f[:, :, -N_g_q:] = self.boundary_conditions.\ f_top(self.f, self.q1_center, self.q2_center, self.p1_center, self.p2_center, self.p3_center, self.physical_system.params )[:, :, -N_g_q:] # Assigning the value to the PETSc Vecs(for dump at t = 0): 
(af.flat(self.f)).to_ndarray(self._local_f_array) (af.flat(self.f[:, :, N_g_q:-N_g_q, N_g_q:-N_g_q])).to_ndarray(self._glob_f_array) # Assigning the function objects to methods of the solver: self._A_q = physical_system.A_q self._C_q = physical_system.C_q self._A_p = physical_system.A_p self._C_p = physical_system.C_p # Source/Sink term: self._source = physical_system.source # Initializing a variable to track time-elapsed: self.time_elapsed = 0 def _convert_to_q_expanded(self, array): """ Since we are limited to use 4D arrays due to the bound from ArrayFire, we define 2 forms which can be used such that the computations may carried out along all dimensions necessary: q_expanded form:(N_p1 * N_p2 * N_p3, N_s, N_q1, N_q2) p_expanded form:(N_p1, N_p2, N_p3, N_s * N_q1 * N_q2) This function converts the input array from p_expanded to q_expanded form. """ # Obtaining start coordinates for the local zone # Additionally, we also obtain the size of the local zone ((i_q1_start, i_q2_start), (N_q1_local, N_q2_local)) = self._da_f.getCorners() array = af.moddims(array, (self.N_p1 + 2 * self.N_ghost_p) * (self.N_p2 + 2 * self.N_ghost_p) * (self.N_p3 + 2 * self.N_ghost_p), self.N_species, (N_q1_local + 2 * self.N_ghost_q), (N_q2_local + 2 * self.N_ghost_q) ) af.eval(array) return (array) def _convert_to_p_expanded(self, array): """ Since we are limited to use 4D arrays due to the bound from ArrayFire, we define 2 forms which can be used such that the computations may carried out along all dimensions necessary: q_expanded form:(N_p1 * N_p2 * N_p3, N_s, N_q1, N_q2) p_expanded form:(N_p1, N_p2, N_p3, N_s * N_q1 * N_q2) This function converts the input array from q_expanded to p_expanded form. 
""" # Obtaining start coordinates for the local zone # Additionally, we also obtain the size of the local zone ((i_q1_start, i_q2_start), (N_q1_local, N_q2_local)) = self._da_f.getCorners() array = af.moddims(array, self.N_p1 + 2 * self.N_ghost_p, self.N_p2 + 2 * self.N_ghost_p, self.N_p3 + 2 * self.N_ghost_p, self.N_species * (N_q1_local + 2 * self.N_ghost_q) * (N_q2_local + 2 * self.N_ghost_q) ) af.eval(array) return (array) def _calculate_q_center(self): """ Initializes the cannonical variables q1, q2 using a centered formulation. The size, and resolution are the same as declared under domain of the physical system object. Returns in q_expanded form. """ # Obtaining start coordinates for the local zone # Additionally, we also obtain the size of the local zone ((i_q1_start, i_q2_start), (N_q1_local, N_q2_local)) = self._da_f.getCorners() i_q1_center = i_q1_start + 0.5 i_q2_center = i_q2_start + 0.5 i_q1 = ( i_q1_center + np.arange(-self.N_ghost_q, N_q1_local + self.N_ghost_q) ) i_q2 = ( i_q2_center + np.arange(-self.N_ghost_q, N_q2_local + self.N_ghost_q) ) q1_center = self.q1_start + i_q1 * self.dq1 q2_center = self.q2_start + i_q2 * self.dq2 q2_center, q1_center = np.meshgrid(q2_center, q1_center) q1_center, q2_center = af.to_array(q1_center), af.to_array(q2_center) # To bring the data structure to the default form:(N_p, N_s, N_q1, N_q2) q1_center = af.reorder(q1_center, 3, 2, 0, 1) q2_center = af.reorder(q2_center, 3, 2, 0, 1) af.eval(q1_center, q2_center) return (q1_center, q2_center) def _calculate_p_center(self): """ Initializes the cannonical variables p1, p2 and p3 using a centered formulation. The size, and resolution are the same as declared under domain of the physical system object. 
""" p1_center = self.p1_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p1 + self.N_ghost_p ) ) * self.dp1 p2_center = self.p2_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p2 + self.N_ghost_p ) ) * self.dp2 p3_center = self.p3_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p3 + self.N_ghost_p ) ) * self.dp3 p2_center, p1_center, p3_center = np.meshgrid(p2_center, p1_center, p3_center ) # Flattening the arrays: p1_center = af.flat(af.to_array(p1_center)) p2_center = af.flat(af.to_array(p2_center))<|fim▁hole|> p1_center = af.tile(p1_center, 1, self.N_species) p2_center = af.tile(p2_center, 1, self.N_species) p3_center = af.tile(p3_center, 1, self.N_species) af.eval(p1_center, p2_center, p3_center) return (p1_center, p2_center, p3_center) def _calculate_p_left(self): p1_left = self.p1_start + np.arange(-self.N_ghost_p, self.N_p1 + self.N_ghost_p ) * self.dp1 p2_center = self.p2_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p2 + self.N_ghost_p ) ) * self.dp2 p3_center = self.p3_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p3 + self.N_ghost_p ) ) * self.dp3 p2_left, p1_left, p3_left = np.meshgrid(p2_center, p1_left, p3_center ) # Flattening the arrays: p1_left = af.flat(af.to_array(p1_left)) p2_left = af.flat(af.to_array(p2_left)) p3_left = af.flat(af.to_array(p3_left)) if(self.N_species > 1): p1_left = af.tile(p1_left, 1, self.N_species) p2_left = af.tile(p2_left, 1, self.N_species) p3_left = af.tile(p3_left, 1, self.N_species) af.eval(p1_left, p2_left, p3_left) return (p1_left, p2_left, p3_left) def _calculate_p_bottom(self): p1_center = self.p1_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p1 + self.N_ghost_p ) ) * self.dp1 p2_bottom = self.p2_start + np.arange(-self.N_ghost_p, self.N_p2 + self.N_ghost_p ) * self.dp2 p3_center = self.p3_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p3 + self.N_ghost_p ) ) * self.dp3 p2_bottom, p1_bottom, p3_bottom = np.meshgrid(p2_bottom, p1_center, p3_center ) # Flattening the arrays: p1_bottom = 
af.flat(af.to_array(p1_bottom)) p2_bottom = af.flat(af.to_array(p2_bottom)) p3_bottom = af.flat(af.to_array(p3_bottom)) if(self.N_species > 1): p1_bottom = af.tile(p1_bottom, 1, self.N_species) p2_bottom = af.tile(p2_bottom, 1, self.N_species) p3_bottom = af.tile(p3_bottom, 1, self.N_species) af.eval(p1_bottom, p2_bottom, p3_bottom) return (p1_bottom, p2_bottom, p3_bottom) def _calculate_p_back(self): p1_center = self.p1_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p1 + self.N_ghost_p ) ) * self.dp1 p2_center = self.p2_start + (0.5 + np.arange(-self.N_ghost_p, self.N_p2 + self.N_ghost_p ) ) * self.dp2 p3_back = self.p3_start + np.arange(-self.N_ghost_p, self.N_p3 + self.N_ghost_p ) * self.dp3 p2_back, p1_back, p3_back = np.meshgrid(p2_center, p1_center, p3_center ) # Flattening the arrays: p1_back = af.flat(af.to_array(p1_back)) p2_back = af.flat(af.to_array(p2_back)) p3_back = af.flat(af.to_array(p3_back)) if(self.N_species > 1): p1_back = af.tile(p1_back, 1, self.N_species) p2_back = af.tile(p2_back, 1, self.N_species) p3_back = af.tile(p3_back, 1, self.N_species) af.eval(p1_back, p2_back, p3_back) return (p1_back, p2_back, p3_back) def _initialize(self, params): """ Called when the solver object is declared. This function is used to initialize the distribution function, using the options as provided by the user. Parameters ---------- params : file/object params contains all details of which methods to use in addition to useful physical constant. 
Additionally, it can also be used to inject methods which need to be used inside some solver routine """ # Initializing with the provided I.C's: # af.broadcast, allows us to perform batched operations # when operating on arrays of different sizes # af.broadcast(function, *args) performs batched # operations on function(*args) self.f = af.broadcast(self.physical_system.initial_conditions.\ initialize_f, self.q1_center, self.q2_center, self.p1_center, self.p2_center, self.p3_center, params ) self.f_initial = self.f if(self.physical_system.params.EM_fields_enabled): rho_initial = multiply(self.physical_system.params.charge, self.compute_moments('density') ) self.fields_solver = fields_solver(self.N_q1, self.N_q2, self.N_ghost_q, self.q1_center, self.q2_center, self.dq1, self.dq2, self._comm, self.boundary_conditions, self.physical_system.params, rho_initial, self.performance_test_flag ) # Injection of solver functions into class as methods: _communicate_f = communicate.\ communicate_f _apply_bcs_f = boundaries.apply_bcs_f strang_timestep = timestep.strang_step lie_timestep = timestep.lie_step swss_timestep = timestep.swss_step jia_timestep = timestep.jia_step compute_moments = compute_moments_imported dump_distribution_function = dump.dump_distribution_function dump_moments = dump.dump_moments dump_EM_fields = dump.dump_EM_fields load_distribution_function = load.load_distribution_function load_EM_fields = load.load_EM_fields print_performance_timings = print_table<|fim▁end|>
p3_center = af.flat(af.to_array(p3_center)) if(self.N_species > 1):
<|file_name|>download-dir-name-view.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../../../definitions/jquery/jquery.d.ts" /> /// <reference path="../../../model/config/config.ts" /> /// <reference path="../../view.ts" /> /// <reference path="../../template.ts" /> // TODO: ファイル分けろwww module Prisc { export interface IConfigViewParams { title: string; description: string; name: string; value?: any; } export class ConfigInputView extends View { public tpl: HBSTemplate; public title: string; public description: string; public name: string; public value: any; constructor(params: IConfigViewParams){ super({ className: "boxy" }); this.name = params.name; this.title = params.title; this.description = params.description; this.value = Config.get(this.name) this.tpl = new HBSTemplate("options/contents/common.hbs"); } render(): ConfigInputView { this.$el.append( this.tpl.render({ title: this.title, description: this.description, name: this.name }) ); this.$el.find('.config-input').html( this.renderInputArea() ); return this; } renderInputArea(): string { // interface return ''; } } export class ConfigTextInputView extends ConfigInputView { constructor(params: IConfigViewParams){ super(params); } events(): Object { return { 'keyup input': 'saveText' }; } renderInputArea(): string { var tpl = new HBSTemplate("options/contents/text-input.hbs"); return tpl.render({ name: this.name, value: Config.get(this.name) }); } saveText(ev: JQueryEventObject) { var inputValue = ev.currentTarget['value']; Config.set(this.name, inputValue); } } export class ConfigRadioInputView extends ConfigInputView { public list: any[]; constructor(params: IConfigViewParams){ super(params); } events(): Object { return { 'change input': 'saveRadio' }; } renderInputArea(): string { var tpl = new HBSTemplate("options/contents/radio-input.hbs"); return $.map(this.list, (radio: any) => { var checked = (Config.get(this.name) == radio.value) ? 
'checked' : ''; return tpl.render({ name: this.name, value: radio.value, label: radio.label, checked: checked }); }).join(''); } saveRadio(ev: JQueryEventObject) { var inputValue = ev.currentTarget['value']; Config.set(this.name, inputValue); } } export class ConfigCheckboxInputView extends ConfigInputView { constructor(params: IConfigViewParams){ super(params); } events(): Object { return { 'change input': 'saveCheckbox' }; } renderInputArea(): string { var tpl = new HBSTemplate("options/contents/checkbox-input.hbs"); var checked = (Config.get(this.name)) ? 'checked' : ''; return tpl.render({ checked: checked }); } saveCheckbox(ev: JQueryEventObject) { var inputValue = ev.currentTarget['checked']; Config.set(this.name, inputValue); } } export class DownloadDirNameView extends ConfigTextInputView { constructor(){ super({ title: "ダウンロードフォルダ", description: "~/Downloads/{ここで設定したフォルダ名}になります", name: "download-dir-name" }); } } export class ImageFormatView extends ConfigRadioInputView { constructor(){ super({ title: "画像形式", description: "スクリーンショットの画像形式です", name: "image-format" }); this.list = [ {value:ImageFormats.jpeg,label:ImageFormats[ImageFormats.jpeg].replace('e','')}, {value:ImageFormats.png,label:ImageFormats[ImageFormats.png]} ]; } } export class ShowFileOnDownloadView extends ConfigCheckboxInputView { constructor(){ super({ title:"ダウンロード時に保存場所を表示", description:"ダウンロードフォルダで指定した場所に保存された結果を表示します", name: "show-file-on-download" }); } } export class OnlyCaptureView extends ConfigCheckboxInputView { constructor(){ super({ title:"編集せずキャプチャをダウンロード",<|fim▁hole|> } }<|fim▁end|>
description:"編集画面を経由によって画質の劣化があり得ます。これを防ぐためにそのままダウンロードします", name: "only-capture" }); }
<|file_name|>test_login.py<|end_file_name|><|fim▁begin|>import pytest from selenium import webdriver @pytest.fixture def driver(request): wd = webdriver.Firefox(capabilities={"marionette": True}) #(desired_capabilities={"chromeOptions": {"args": ["--start-fullscreen"]}})<|fim▁hole|> return wd def test_example(driver): driver.get("http://localhost/litecart/admin/") driver.find_element_by_xpath("//input[@name='username']").send_keys("admin") driver.find_element_by_xpath("//input[@name='password']").send_keys("admin") driver.find_element_by_xpath("//button[@name='login']").click()<|fim▁end|>
request.addfinalizer(wd.quit)
<|file_name|>config.py<|end_file_name|><|fim▁begin|>"""Pipeline configuration parameters.""" from os.path import dirname, abspath, join from sqlalchemy import create_engine OS_TYPES_URL = ('https://raw.githubusercontent.com/' 'openspending/os-types/master/src/os-types.json') PIPELINE_FILE = 'pipeline-spec.yaml' SOURCE_DATAPACKAGE_FILE = 'source.datapackage.json' SOURCE_FILE = 'source.description.yaml' STATUS_FILE = 'pipeline-status.json' SCRAPER_FILE = 'scraper.py' SOURCE_ZIP = 'source.datapackage.zip' FISCAL_ZIP_FILE = 'fiscal.datapackage.zip' SOURCE_DB = 'source.db.xlsx' DATAPACKAGE_FILE = 'datapackage.json' ROOT_DIR = abspath(join(dirname(__file__), '..')) DATA_DIR = join(ROOT_DIR, 'data') SPECIFICATIONS_DIR = join(ROOT_DIR, 'specifications') PROCESSORS_DIR = join(ROOT_DIR, 'common', 'processors') CODELISTS_DIR = join(ROOT_DIR, 'codelists') DROPBOX_DIR = join(ROOT_DIR, 'dropbox') GEOCODES_FILE = join(ROOT_DIR, 'geography', 'geocodes.nuts.csv') FISCAL_SCHEMA_FILE = join(SPECIFICATIONS_DIR, 'fiscal.schema.yaml') FISCAL_MODEL_FILE = join(SPECIFICATIONS_DIR, 'fiscal.model.yaml') FISCAL_METADATA_FILE = join(SPECIFICATIONS_DIR, 'fiscal.metadata.yaml') DEFAULT_PIPELINE_FILE = join(SPECIFICATIONS_DIR, 'default-pipeline-spec.yaml') TEMPLATE_SCRAPER_FILE = join(PROCESSORS_DIR, 'scraper_template.py') DESCRIPTION_SCHEMA_FILE = join(SPECIFICATIONS_DIR, 'source.schema.json') TEMPLATE_SOURCE_FILE = join(SPECIFICATIONS_DIR, SOURCE_FILE) LOCAL_PATH_EXTRACTOR = 'ingest_local_file' REMOTE_CSV_EXTRACTOR = 'simple_remote_source' REMOTE_EXCEL_EXTRACTOR = 'stream_remote_excel' DATAPACKAGE_MUTATOR = 'mutate_datapackage' DB_URI = 'sqlite:///{}/metrics.sqlite' DB_ENGINE = create_engine(DB_URI.format(ROOT_DIR)) VERBOSE = False LOG_SAMPLE_SIZE = 15 JSON_FORMAT = dict(indent=4, ensure_ascii=False, default=repr) SNIFFER_SAMPLE_SIZE = 5000 SNIFFER_MAX_FAILURE_RATIO = 0.01 IGNORED_FIELD_TAG = '_ignored' UNKNOWN_FIELD_TAG = '_unknown' WARNING_CUTOFF = 10 NUMBER_FORMATS = [ {'format': 'default', 
'bareNumber': False, 'decimalChar': '.', 'groupChar': ','}, {'format': 'default', 'bareNumber': False, 'decimalChar': ',', 'groupChar': '.'}, {'format': 'default', 'bareNumber': False, 'decimalChar': '.', 'groupChar': ' '}, {'format': 'default', 'bareNumber': False, 'decimalChar': ',', 'groupChar': ' '}, {'format': 'default', 'bareNumber': False, 'decimalChar': '.', 'groupChar': ''}, {'format': 'default', 'bareNumber': False, 'decimalChar': '.', 'groupChar': '`'}, {'format': 'default', 'bareNumber': False, 'decimalChar': ',', 'groupChar': '\''}, {'format': 'default', 'bareNumber': False, 'decimalChar': ',', 'groupChar': ' '}, ] DATE_FORMATS = [<|fim▁hole|> {'format': '%d//%m/%Y'}, {'format': '%d-%b-%Y'}, # abbreviated month {'format': '%d-%b-%y'}, # abbreviated month {'format': '%d. %b %y'}, # abbreviated month {'format': '%b %y'}, # abbreviated month {'format': '%d/%m/%y'}, {'format': '%d-%m-%Y'}, {'format': '%Y-%m-%d'}, {'format': '%y-%m-%d'}, {'format': '%y.%m.%d'}, {'format': '%Y.%m.%d'}, {'format': '%d.%m.%Y'}, {'format': '%d.%m.%y'}, {'format': '%d.%m.%Y %H:%M'}, {'format': '%Y-%m-%d %H:%M:%S'}, {'format': '%Y-%m-%d %H:%M:%S.%f'}, {'format': '%Y-%m-%dT%H:%M:%SZ'}, {'format': '%m/%d/%Y'}, {'format': '%m/%Y'}, {'format': '%y'}, ]<|fim▁end|>
{'format': '%Y'}, {'format': '%d/%m/%Y'},
<|file_name|>TestDaguCar.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- from rcr.robots.dagucar.DaguCar import DaguCar def main(): car = DaguCar( "/dev/rfcomm1", 500 ) car.MoveForward( 15 ) car.Pause( 1000 ) car.MoveBackward( 15 ) car.Pause( 1000 ) car.MoveLeft( 15 ) car.Pause( 1000 ) car.MoveRight( 15 ) car.Pause( 1000 ) car.MoveForwardLeft( 15 ) car.Pause( 1000 ) car.MoveForwardRight( 15 ) car.Pause( 1000 ) car.MoveBackwardLeft( 15 ) car.Pause( 1000 ) car.MoveBackwardRight( 15 ) car.Pause( 1000 )<|fim▁hole|> ### main()<|fim▁end|>
car.Stop() car.Close()
<|file_name|>owners_file_tags.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import posixpath import re from collections import defaultdict def uniform_path_format(native_path): """Alters the path if needed to be separated by forward slashes.""" return posixpath.normpath(native_path.replace(os.sep, posixpath.sep)) def parse(filename): """Searches the file for lines that start with `# TEAM:` or `# COMPONENT:`. Args: filename (str): path to the file to parse. Returns: a dict with the following format, with any subset of the listed keys: { 'component': 'component>name', 'team': '[email protected]', 'os': 'Linux|Windows|Mac|Android|Chrome|Fuchsia' } """ team_regex = re.compile('\s*#\s*TEAM\s*:\s*(\S+)') component_regex = re.compile('\s*#\s*COMPONENT\s*:\s*(\S+)') os_regex = re.compile('\s*#\s*OS\s*:\s*(\S+)') result = {} with open(filename) as f: for line in f: team_matches = team_regex.match(line) if team_matches: result['team'] = team_matches.group(1) component_matches = component_regex.match(line) if component_matches: result['component'] = component_matches.group(1) os_matches = os_regex.match(line) if os_matches: result['os'] = os_matches.group(1) return result def aggregate_components_from_owners(all_owners_data, root): """Converts the team/component/os tags parsed from OWNERS into mappings. Args: all_owners_data (dict): A mapping from relative path to a dir to a dict mapping the tag names to their values. See docstring for scrape_owners. root (str): the path to the src directory. 
Returns: A tuple (data, warnings, stats) where data is a dict of the form {'component-to-team': {'Component1': 'team1@chr...', ...}, 'teams-per-component': {'Component1': ['team1@chr...', 'team2@chr...]}, 'dir-to-component': {'/path/to/1': 'Component1', ...}} 'dir-to-team': {'/path/to/1': 'team1@', ...}} , warnings is a list of strings, stats is a dict of form {'OWNERS-count': total number of OWNERS files, 'OWNERS-with-component-only-count': number of OWNERS have # COMPONENT, 'OWNERS-with-team-and-component-count': number of OWNERS have TEAM and COMPONENT, 'OWNERS-count-by-depth': {directory depth: number of OWNERS}, 'OWNERS-with-component-only-count-by-depth': {directory depth: number of OWNERS have COMPONENT at this depth}, 'OWNERS-with-team-and-component-count-by-depth':{directory depth: ...}} """ stats = {} num_total = 0 num_with_component = 0 num_with_team_component = 0 num_total_by_depth = defaultdict(int) num_with_component_by_depth = defaultdict(int) num_with_team_component_by_depth = defaultdict(int) warnings = [] teams_per_component = defaultdict(set) topmost_team = {} dir_to_component = {} dir_missing_info_by_depth = defaultdict(list) dir_to_team = {} for rel_dirname, owners_data in all_owners_data.iteritems(): # Normalize this relative path to posix-style to make counting separators # work correctly as a means of obtaining the file_depth. rel_path = uniform_path_format(os.path.relpath(rel_dirname, root)) file_depth = 0 if rel_path == '.' 
else rel_path.count(posixpath.sep) + 1 num_total += 1 num_total_by_depth[file_depth] += 1 component = owners_data.get('component') team = owners_data.get('team') os_tag = owners_data.get('os') if os_tag and component: component = '%s(%s)' % (component, os_tag) if team: dir_to_team[rel_dirname] = team if component: num_with_component += 1 num_with_component_by_depth[file_depth] += 1 dir_to_component[rel_dirname] = component if team: num_with_team_component += 1 num_with_team_component_by_depth[file_depth] += 1 teams_per_component[component].add(team) if component not in topmost_team or file_depth < topmost_team[ component]['depth']: topmost_team[component] = {'depth': file_depth, 'team': team} else: rel_owners_path = uniform_path_format(os.path.join(rel_dirname, 'OWNERS')) warnings.append('%s has no COMPONENT tag' % rel_owners_path) if not team and not os_tag: dir_missing_info_by_depth[file_depth].append(rel_owners_path) mappings = { 'component-to-team': { k: v['team'] for k, v in topmost_team.iteritems() }, 'teams-per-component': { k: sorted(list(v)) for k, v in teams_per_component.iteritems() }, 'dir-to-component': dir_to_component, 'dir-to-team': dir_to_team, } warnings += validate_one_team_per_component(mappings) stats = {'OWNERS-count': num_total, 'OWNERS-with-component-only-count': num_with_component, 'OWNERS-with-team-and-component-count': num_with_team_component, 'OWNERS-count-by-depth': num_total_by_depth, 'OWNERS-with-component-only-count-by-depth': num_with_component_by_depth, 'OWNERS-with-team-and-component-count-by-depth': num_with_team_component_by_depth, 'OWNERS-missing-info-by-depth': dir_missing_info_by_depth} return mappings, warnings, stats def validate_one_team_per_component(m): """Validates that each component is associated with at most 1 team.""" warnings = [] # TODO(robertocn): Validate the component names: crbug.com/679540 teams_per_component = m['teams-per-component'] for c in teams_per_component: if len(teams_per_component[c]) > 1: 
warnings.append('Component %s has the following teams assigned: %s.\n' 'Team %s is being used, as it is defined at the OWNERS ' 'file at the topmost dir' % ( c, ', '.join(teams_per_component[c]), m['component-to-team'][c] )) return warnings def scrape_owners(root, include_subdirs): """Recursively parse OWNERS files for tags. Args: root (str): The directory where to start parsing. include_subdirs (bool): Whether to generate entries for subdirs with no own OWNERS files based on the parent dir's tags. Returns a dict in the form below. { '/path/to/dir': { 'component': 'component>name', 'team': '[email protected]', 'os': 'Linux|Windows|Mac|Android|Chrome|Fuchsia' }, '/path/to/dir/inside/dir': { 'component': ... } } """<|fim▁hole|> ancestor = os.path.dirname(dirname) while ancestor: rel_ancestor = uniform_path_format(os.path.relpath(ancestor, root)) if rel_ancestor in data and data[rel_ancestor].get(tag): return data[rel_ancestor][tag] if rel_ancestor == '.': break ancestor = os.path.dirname(ancestor) return for dirname, _, files in os.walk(root): # Proofing against windows casing oddities. owners_file_names = [f for f in files if f.upper() == 'OWNERS'] rel_dirname = uniform_path_format(os.path.relpath(dirname, root)) if owners_file_names or include_subdirs: if owners_file_names: owners_full_path = os.path.join(dirname, owners_file_names[0]) data[rel_dirname] = parse(owners_full_path) else: data[rel_dirname] = {} for tag in ('component', 'os', 'team'): if not tag in data[rel_dirname]: ancestor_tag = nearest_ancestor_tag(dirname, tag) if ancestor_tag: data[rel_dirname][tag] = ancestor_tag return data<|fim▁end|>
data = {} def nearest_ancestor_tag(dirname, tag): """ Find the value of tag in the nearest ancestor that defines it."""
<|file_name|>fake_cloudidentitygroup.go<|end_file_name|><|fim▁begin|>// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software<|fim▁hole|>// distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // *** DISCLAIMER *** // Config Connector's go-client for CRDs is currently in ALPHA, which means // that future versions of the go-client may include breaking changes. // Please try it out and give us feedback! // Code generated by main. DO NOT EDIT. package fake import ( "context" v1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/clients/generated/apis/cloudidentity/v1beta1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" labels "k8s.io/apimachinery/pkg/labels" schema "k8s.io/apimachinery/pkg/runtime/schema" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" testing "k8s.io/client-go/testing" ) // FakeCloudIdentityGroups implements CloudIdentityGroupInterface type FakeCloudIdentityGroups struct { Fake *FakeCloudidentityV1beta1 ns string } var cloudidentitygroupsResource = schema.GroupVersionResource{Group: "cloudidentity.cnrm.cloud.google.com", Version: "v1beta1", Resource: "cloudidentitygroups"} var cloudidentitygroupsKind = schema.GroupVersionKind{Group: "cloudidentity.cnrm.cloud.google.com", Version: "v1beta1", Kind: "CloudIdentityGroup"} // Get takes name of the cloudIdentityGroup, and returns the corresponding cloudIdentityGroup object, and an error if there is any. 
func (c *FakeCloudIdentityGroups) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1beta1.CloudIdentityGroup, err error) { obj, err := c.Fake. Invokes(testing.NewGetAction(cloudidentitygroupsResource, c.ns, name), &v1beta1.CloudIdentityGroup{}) if obj == nil { return nil, err } return obj.(*v1beta1.CloudIdentityGroup), err } // List takes label and field selectors, and returns the list of CloudIdentityGroups that match those selectors. func (c *FakeCloudIdentityGroups) List(ctx context.Context, opts v1.ListOptions) (result *v1beta1.CloudIdentityGroupList, err error) { obj, err := c.Fake. Invokes(testing.NewListAction(cloudidentitygroupsResource, cloudidentitygroupsKind, c.ns, opts), &v1beta1.CloudIdentityGroupList{}) if obj == nil { return nil, err } label, _, _ := testing.ExtractFromListOptions(opts) if label == nil { label = labels.Everything() } list := &v1beta1.CloudIdentityGroupList{ListMeta: obj.(*v1beta1.CloudIdentityGroupList).ListMeta} for _, item := range obj.(*v1beta1.CloudIdentityGroupList).Items { if label.Matches(labels.Set(item.Labels)) { list.Items = append(list.Items, item) } } return list, err } // Watch returns a watch.Interface that watches the requested cloudIdentityGroups. func (c *FakeCloudIdentityGroups) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { return c.Fake. InvokesWatch(testing.NewWatchAction(cloudidentitygroupsResource, c.ns, opts)) } // Create takes the representation of a cloudIdentityGroup and creates it. Returns the server's representation of the cloudIdentityGroup, and an error, if there is any. func (c *FakeCloudIdentityGroups) Create(ctx context.Context, cloudIdentityGroup *v1beta1.CloudIdentityGroup, opts v1.CreateOptions) (result *v1beta1.CloudIdentityGroup, err error) { obj, err := c.Fake. 
Invokes(testing.NewCreateAction(cloudidentitygroupsResource, c.ns, cloudIdentityGroup), &v1beta1.CloudIdentityGroup{}) if obj == nil { return nil, err } return obj.(*v1beta1.CloudIdentityGroup), err } // Update takes the representation of a cloudIdentityGroup and updates it. Returns the server's representation of the cloudIdentityGroup, and an error, if there is any. func (c *FakeCloudIdentityGroups) Update(ctx context.Context, cloudIdentityGroup *v1beta1.CloudIdentityGroup, opts v1.UpdateOptions) (result *v1beta1.CloudIdentityGroup, err error) { obj, err := c.Fake. Invokes(testing.NewUpdateAction(cloudidentitygroupsResource, c.ns, cloudIdentityGroup), &v1beta1.CloudIdentityGroup{}) if obj == nil { return nil, err } return obj.(*v1beta1.CloudIdentityGroup), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). func (c *FakeCloudIdentityGroups) UpdateStatus(ctx context.Context, cloudIdentityGroup *v1beta1.CloudIdentityGroup, opts v1.UpdateOptions) (*v1beta1.CloudIdentityGroup, error) { obj, err := c.Fake. Invokes(testing.NewUpdateSubresourceAction(cloudidentitygroupsResource, "status", c.ns, cloudIdentityGroup), &v1beta1.CloudIdentityGroup{}) if obj == nil { return nil, err } return obj.(*v1beta1.CloudIdentityGroup), err } // Delete takes name of the cloudIdentityGroup and deletes it. Returns an error if one occurs. func (c *FakeCloudIdentityGroups) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { _, err := c.Fake. Invokes(testing.NewDeleteActionWithOptions(cloudidentitygroupsResource, c.ns, name, opts), &v1beta1.CloudIdentityGroup{}) return err } // DeleteCollection deletes a collection of objects. 
func (c *FakeCloudIdentityGroups) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { action := testing.NewDeleteCollectionAction(cloudidentitygroupsResource, c.ns, listOpts) _, err := c.Fake.Invokes(action, &v1beta1.CloudIdentityGroupList{}) return err } // Patch applies the patch and returns the patched cloudIdentityGroup. func (c *FakeCloudIdentityGroups) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1beta1.CloudIdentityGroup, err error) { obj, err := c.Fake. Invokes(testing.NewPatchSubresourceAction(cloudidentitygroupsResource, c.ns, name, pt, data, subresources...), &v1beta1.CloudIdentityGroup{}) if obj == nil { return nil, err } return obj.(*v1beta1.CloudIdentityGroup), err }<|fim▁end|>
<|file_name|>WAxesButtons.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- encoding: utf-8 -*- from roars.gui.pyqtutils import PyQtWidget, PyQtImageConverter from WBaseWidget import WBaseWidget from PyQt4.QtGui import * from PyQt4.QtCore import * from PyQt4 import QtCore class WAxesButtons(WBaseWidget): def __init__(self, name='axes', label='Axes Buttons', changeCallback=None, step=0.001): super(WAxesButtons, self).__init__(<|fim▁hole|> ) self.name = name self.label = label self.step = step self.ui_label.setText(label) #⬢⬢⬢⬢⬢➤ Callback self.changeCallback = changeCallback self.buttons = { 'x+': self.ui_button_x_plus, 'x-': self.ui_button_x_minus, 'y+': self.ui_button_y_plus, 'y-': self.ui_button_y_minus, 'z+': self.ui_button_z_plus, 'z-': self.ui_button_z_minus } self.buttons_name_map = {} # TODO:example style self.ui_button_x_minus.setStyleSheet( "QPushButton:hover{background-color: red}") for label, button in self.buttons.iteritems(): button.clicked.connect(self.buttonPressed) self.buttons_name_map[str(button.objectName())] = label def buttonPressed(self): if self.changeCallback != None: label = self.buttons_name_map[str(self.sender().objectName())] delta = float(label[1] + str(self.step)) val = (self.name, label[0], delta) self.changeCallback(val)<|fim▁end|>
'ui_axes_buttons'
<|file_name|>solution.py<|end_file_name|><|fim▁begin|># Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None class Solution(object): def pathSum(self, root, sum): """ :type root: TreeNode :type sum: int<|fim▁hole|> :rtype: List[List[int]] """ def dfs(root, target): if not root: return if not root.left and not root.right: if target == root.val: res.append(path + [root.val]) return path.append(root.val) dfs(root.left, target - root.val) dfs(root.right, target - root.val) path.pop() res, path = [], [] dfs(root, sum) return res # 114 / 114 test cases passed. # Status: Accepted # Runtime: 66 ms # beats 96.94 %<|fim▁end|>
<|file_name|>ada_delta.py<|end_file_name|><|fim▁begin|>import numpy from chainer import cuda from chainer import optimizer _default_hyperparam = optimizer.Hyperparameter() _default_hyperparam.rho = 0.95 _default_hyperparam.eps = 1e-6 class AdaDeltaRule(optimizer.UpdateRule): """Update rule of Zeiler's ADADELTA. See :class:`~chainer.optimizers.AdaDelta` for the default values of the hyperparameters. Args: parent_hyperparam (~chainer.optimizer.Hyperparameter): Hyperparameter that provides the default values. rho (float): Exponential decay rate of the first and second order<|fim▁hole|> eps (float): Small value for the numerical stability. """ def __init__(self, parent_hyperparam=None, rho=None, eps=None): super(AdaDeltaRule, self).__init__( parent_hyperparam or _default_hyperparam) if rho is not None: self.hyperparam.rho = rho if eps is not None: self.hyperparam.eps = eps def init_state(self, param): xp = cuda.get_array_module(param.data) with cuda.get_device_from_array(param.data): self.state['msg'] = xp.zeros_like(param.data) self.state['msdx'] = xp.zeros_like(param.data) def update_core_cpu(self, param): grad = param.grad if grad is None: return msg, msdx = self.state['msg'], self.state['msdx'] rho = self.hyperparam.rho eps = self.hyperparam.eps msg *= rho msg += (1 - rho) * grad * grad dx = numpy.sqrt((msdx + eps) / (msg + eps)) * grad msdx *= rho msdx += (1 - rho) * dx * dx param.data -= dx def update_core_gpu(self, param): grad = param.grad if grad is None: return cuda.elementwise( 'T grad, T one_minus_rho, T eps', 'T param, T msg, T msdx', '''msg = msg + one_minus_rho * (grad * grad - msg); T dx = sqrt((msdx + eps) / (msg + eps)) * grad; msdx += one_minus_rho * (dx * dx - msdx); param -= dx;''', 'adadelta')(grad, 1 - self.hyperparam.rho, self.hyperparam.eps, param.data, self.state['msg'], self.state['msdx']) class AdaDelta(optimizer.GradientMethod): """Zeiler's ADADELTA. 
See: http://www.matthewzeiler.com/pubs/googleTR2012/googleTR2012.pdf Args: rho (float): Exponential decay rate of the first and second order moments. eps (float): Small value for the numerical stability. """ def __init__(self, rho=_default_hyperparam.rho, eps=_default_hyperparam.eps): super(AdaDelta, self).__init__() self.hyperparam.rho = rho self.hyperparam.eps = eps rho = optimizer.HyperparameterProxy('rho') eps = optimizer.HyperparameterProxy('eps') def create_update_rule(self): return AdaDeltaRule(self.hyperparam)<|fim▁end|>
moments.
<|file_name|>requestmaker.js<|end_file_name|><|fim▁begin|>/*! @license Firebase v4.3.1<|fim▁hole|> "use strict"; //# sourceMappingURL=requestmaker.js.map<|fim▁end|>
Build: rev-b4fe95f Terms: https://firebase.google.com/terms/ */
<|file_name|>raw_building.py<|end_file_name|><|fim▁begin|># Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <[email protected]> # Copyright (c) 2014-2016 Claudiu Popa <[email protected]> # Copyright (c) 2014 Google, Inc. # Copyright (c) 2015-2016 Cara Vinson <[email protected]> # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html # For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER """this module contains a set of functions to create astroid trees from scratch (build_* functions) or from living object (object_build_* functions) """ import inspect import logging import os import sys import types import six from astroid import bases from astroid import manager from astroid import node_classes from astroid import nodes MANAGER = manager.AstroidManager() # the keys of CONST_CLS eg python builtin types _CONSTANTS = tuple(node_classes.CONST_CLS) _JYTHON = os.name == 'java' _BUILTINS = vars(six.moves.builtins) _LOG = logging.getLogger(__name__) def _io_discrepancy(member): # _io module names itself `io`: http://bugs.python.org/issue18602 member_self = getattr(member, '__self__', None) return (member_self and inspect.ismodule(member_self) and member_self.__name__ == '_io' and member.__module__ == 'io') def _attach_local_node(parent, node, name): node.name = name # needed by add_local_node parent.add_local_node(node) def _add_dunder_class(func, member): """Add a __class__ member to the given func node, if we can determine it.""" python_cls = member.__class__ cls_name = getattr(python_cls, '__name__', None) if not cls_name: return cls_bases = [ancestor.__name__ for ancestor in python_cls.__bases__] ast_klass = build_class(cls_name, cls_bases, python_cls.__doc__) func.instance_attrs['__class__'] = [ast_klass] _marker = object() def attach_dummy_node(node, name, runtime_object=_marker): """create a dummy node and register it in the locals of the given node with the specified name """ enode = nodes.EmptyNode() 
enode.object = runtime_object _attach_local_node(node, enode, name) def _has_underlying_object(self): return self.object is not None and self.object is not _marker nodes.EmptyNode.has_underlying_object = _has_underlying_object def attach_const_node(node, name, value): """create a Const node and register it in the locals of the given node with the specified name """ if name not in node.special_attributes: _attach_local_node(node, nodes.const_factory(value), name) def attach_import_node(node, modname, membername): """create a ImportFrom node and register it in the locals of the given node with the specified name """ from_node = nodes.ImportFrom(modname, [(membername, None)]) _attach_local_node(node, from_node, membername) def build_module(name, doc=None): """create and initialize a astroid Module node""" node = nodes.Module(name, doc, pure_python=False) node.package = False node.parent = None return node def build_class(name, basenames=(), doc=None): """create and initialize a astroid ClassDef node""" node = nodes.ClassDef(name, doc) for base in basenames: basenode = nodes.Name() basenode.name = base node.bases.append(basenode) basenode.parent = node return node def build_function(name, args=None, defaults=None, doc=None): """create and initialize a astroid FunctionDef node""" args, defaults = args or [], defaults or [] # first argument is now a list of decorators func = nodes.FunctionDef(name, doc) func.args = argsnode = nodes.Arguments() argsnode.args = [] for arg in args: argsnode.args.append(nodes.Name()) argsnode.args[-1].name = arg argsnode.args[-1].parent = argsnode argsnode.defaults = [] for default in defaults: argsnode.defaults.append(nodes.const_factory(default)) argsnode.defaults[-1].parent = argsnode argsnode.kwarg = None argsnode.vararg = None argsnode.parent = func if args: register_arguments(func) return func def build_from_import(fromname, names): """create and initialize an astroid ImportFrom import statement""" return nodes.ImportFrom(fromname, 
[(name, None) for name in names]) def register_arguments(func, args=None): """add given arguments to local args is a list that may contains nested lists (i.e. def func(a, (b, c, d)): ...) """ if args is None: args = func.args.args if func.args.vararg: func.set_local(func.args.vararg, func.args) if func.args.kwarg: func.set_local(func.args.kwarg, func.args) for arg in args: if isinstance(arg, nodes.Name): func.set_local(arg.name, arg) else: register_arguments(func, arg.elts) def object_build_class(node, member, localname): """create astroid for a living class object""" basenames = [base.__name__ for base in member.__bases__] return _base_class_object_build(node, member, basenames, localname=localname) def object_build_function(node, member, localname): """create astroid for a living function object""" # pylint: disable=deprecated-method; completely removed in 2.0 args, varargs, varkw, defaults = inspect.getargspec(member) if varargs is not None: args.append(varargs) if varkw is not None: args.append(varkw) func = build_function(getattr(member, '__name__', None) or localname, args, defaults, member.__doc__) node.add_local_node(func, localname) def object_build_datadescriptor(node, member, name): """create astroid for a living data descriptor object""" return _base_class_object_build(node, member, [], name) def object_build_methoddescriptor(node, member, localname): """create astroid for a living method descriptor object""" # FIXME get arguments ? func = build_function(getattr(member, '__name__', None) or localname, doc=member.__doc__) # set node's arguments to None to notice that we have no information, not # and empty argument list func.args.args = None node.add_local_node(func, localname) _add_dunder_class(func, member) def _base_class_object_build(node, member, basenames, name=None, localname=None): """create astroid for a living class object, with a given set of base names (e.g. 
ancestors) """ klass = build_class(name or getattr(member, '__name__', None) or localname, basenames, member.__doc__) klass._newstyle = isinstance(member, type) node.add_local_node(klass, localname) try: # limit the instantiation trick since it's too dangerous # (such as infinite test execution...) # this at least resolves common case such as Exception.args, # OSError.errno if issubclass(member, Exception): instdict = member().__dict__ else: raise TypeError except: # pylint: disable=bare-except pass else: for item_name, obj in instdict.items(): valnode = nodes.EmptyNode() valnode.object = obj valnode.parent = klass valnode.lineno = 1 klass.instance_attrs[item_name] = [valnode] return klass def _build_from_function(node, name, member, module): # verify this is not an imported function try: code = six.get_function_code(member) except AttributeError: # Some implementations don't provide the code object, # such as Jython. code = None filename = getattr(code, 'co_filename', None) if filename is None: assert isinstance(member, object) object_build_methoddescriptor(node, member, name) elif filename != getattr(module, '__file__', None): attach_dummy_node(node, name, member) else: object_build_function(node, member, name) class InspectBuilder(object): """class for building nodes from living object this is actually a really minimal representation, including only Module, FunctionDef and ClassDef nodes and some others as guessed. """ # astroid from living objects ############################################### def __init__(self): self._done = {} self._module = None def inspect_build(self, module, modname=None, path=None): """build astroid from a living module (i.e. 
using inspect) this is used when there is no python source code available (either because it's a built-in module or because the .py is not available) """ self._module = module if modname is None: modname = module.__name__ try: node = build_module(modname, module.__doc__) except AttributeError: # in jython, java modules have no __doc__ (see #109562) node = build_module(modname) node.file = node.path = os.path.abspath(path) if path else path node.name = modname MANAGER.cache_module(node) node.package = hasattr(module, '__path__') self._done = {} self.object_build(node, module) return node def object_build(self, node, obj): """recursive method which create a partial ast from real objects (only function, class, and method are handled) """ if obj in self._done: return self._done[obj] self._done[obj] = node for name in dir(obj): try: member = getattr(obj, name) except AttributeError: # damned ExtensionClass.Base, I know you're there ! attach_dummy_node(node, name) continue if inspect.ismethod(member): member = six.get_method_function(member) if inspect.isfunction(member): _build_from_function(node, name, member, self._module) elif inspect.isbuiltin(member): if (not _io_discrepancy(member) and self.imported_member(node, member, name)): continue object_build_methoddescriptor(node, member, name) elif inspect.isclass(member): if self.imported_member(node, member, name): continue if member in self._done: class_node = self._done[member] if class_node not in node.locals.get(name, ()): node.add_local_node(class_node, name) else: class_node = object_build_class(node, member, name) # recursion self.object_build(class_node, member) if name == '__class__' and class_node.parent is None: class_node.parent = self._done[self._module] elif inspect.ismethoddescriptor(member): assert isinstance(member, object) object_build_methoddescriptor(node, member, name) elif inspect.isdatadescriptor(member): assert isinstance(member, object) object_build_datadescriptor(node, member, name) elif 
isinstance(member, _CONSTANTS): attach_const_node(node, name, member) elif inspect.isroutine(member): # This should be called for Jython, where some builtin # methods aren't caught by isbuiltin branch. _build_from_function(node, name, member, self._module) else: # create an empty node so that the name is actually defined attach_dummy_node(node, name, member) def imported_member(self, node, member, name): """verify this is not an imported class or handle it""" # /!\ some classes like ExtensionClass doesn't have a __module__ # attribute ! Also, this may trigger an exception on badly built module # (see http://www.logilab.org/ticket/57299 for instance) try: modname = getattr(member, '__module__', None) except: # pylint: disable=bare-except _LOG.exception('unexpected error while building ' 'astroid from living object') modname = None if modname is None: if (name in ('__new__', '__subclasshook__') or (name in _BUILTINS and _JYTHON)): # Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14) # >>> print object.__new__.__module__ # None modname = six.moves.builtins.__name__ else: attach_dummy_node(node, name, member) return True real_name = { 'gtk': 'gtk_gtk', '_io': 'io', }.get(modname, modname) if real_name != self._module.__name__: # check if it sounds valid and then add an import node, else use a # dummy node try: getattr(sys.modules[modname], name) except (KeyError, AttributeError): attach_dummy_node(node, name, member) else: attach_import_node(node, modname, name) return True return False ### astroid bootstrapping ###################################################### Astroid_BUILDER = InspectBuilder() _CONST_PROXY = {} def _astroid_bootstrapping(astroid_builtin=None): """astroid boot strapping the builtins module""" # this boot strapping is necessary since we need the Const nodes to # inspect_build builtins, and then we can proxy Const if astroid_builtin is None: from six.moves import builtins astroid_builtin = Astroid_BUILDER.inspect_build(builtins) # pylint: 
disable=redefined-outer-name for cls, node_cls in node_classes.CONST_CLS.items(): if cls is type(None): proxy = build_class('NoneType') proxy.parent = astroid_builtin elif cls is type(NotImplemented): proxy = build_class('NotImplementedType') proxy.parent = astroid_builtin else: proxy = astroid_builtin.getattr(cls.__name__)[0] if cls in (dict, list, set, tuple): node_cls._proxied = proxy else: _CONST_PROXY[cls] = proxy _astroid_bootstrapping() # TODO : find a nicer way to handle this situation; # However __proxied introduced an # infinite recursion (see https://bugs.launchpad.net/pylint/+bug/456870) def _set_proxied(const):<|fim▁hole|> _GeneratorType = nodes.ClassDef(types.GeneratorType.__name__, types.GeneratorType.__doc__) _GeneratorType.parent = MANAGER.astroid_cache[six.moves.builtins.__name__] bases.Generator._proxied = _GeneratorType Astroid_BUILDER.object_build(bases.Generator._proxied, types.GeneratorType) _builtins = MANAGER.astroid_cache[six.moves.builtins.__name__] BUILTIN_TYPES = (types.GetSetDescriptorType, types.GeneratorType, types.MemberDescriptorType, type(None), type(NotImplemented), types.FunctionType, types.MethodType, types.BuiltinFunctionType, types.ModuleType, types.TracebackType) for _type in BUILTIN_TYPES: if _type.__name__ not in _builtins: cls = nodes.ClassDef(_type.__name__, _type.__doc__) cls.parent = MANAGER.astroid_cache[six.moves.builtins.__name__] Astroid_BUILDER.object_build(cls, _type) _builtins[_type.__name__] = cls<|fim▁end|>
return _CONST_PROXY[const.value.__class__] nodes.Const._proxied = property(_set_proxied)
<|file_name|>borrowck-loan-blocks-move-cc.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(box_syntax)] use std::thread::Thread;<|fim▁hole|>fn borrow<F>(v: &isize, f: F) where F: FnOnce(&isize) { f(v); } fn box_imm() { let v = box 3; let _w = &v; Thread::spawn(move|| { println!("v={}", *v); //~^ ERROR cannot move `v` into closure }); } fn box_imm_explicit() { let v = box 3; let _w = &v; Thread::spawn(move|| { println!("v={}", *v); //~^ ERROR cannot move }); } fn main() { }<|fim▁end|>
<|file_name|>test_ha_pair.py<|end_file_name|><|fim▁begin|># Copyright 2015 Infoblox Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import os from oslo_config import cfg from heat.engine import stack from heat.engine import template from heat.tests import common from heat.tests import utils from heat_infoblox.resources import ha_pair from heat_infoblox.tests.utils import create_side_effect ha_pair_template = { 'heat_template_version': '2013-05-23', 'resources': { 'my_ha_pair': { 'type': 'Infoblox::Grid::HaPair', 'properties': { 'name': 'HaPair1',<|fim▁hole|> 'node2_lan1': 'NODE2_LAN1', 'vip_floating_ip': 'VIP_FLOATING_IP', 'node1_floating_ip': 'NODE1_FLOATING_IP', 'node2_floating_ip': 'NODE2_FLOATING_IP', 'virtual_router_id': 123 } } } } DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) class HaPairTest(common.HeatTestCase): def setUp(self): heat_infoblox_path = os.path.abspath(os.path.join( os.path.dirname(__file__), os.pardir)) cfg.CONF.import_opt('plugin_dirs', 'heat.common.config') cfg.CONF.set_override('plugin_dirs', heat_infoblox_path) super(HaPairTest, self).setUp() self.ctx = utils.dummy_context() def set_stack(self, stack_template): self.stack = stack.Stack( self.ctx, 'ha_pair_test_stack', template.Template(stack_template) ) self.my_ha_pair = self.stack['my_ha_pair'] def test_resource_mapping(self): mapping = ha_pair.resource_mapping() self.assertEqual(1, len(mapping)) self.assertEqual(ha_pair.HaPair, 
mapping['Infoblox::Grid::HaPair']) def prepair_ha_pair(self, update_ports=True): props = ha_pair_template['resources']['my_ha_pair']['properties'] props['update_allowed_address_pairs'] = update_ports self.set_stack(ha_pair_template) self.my_ha_pair.client = mock.MagicMock() get_first_ip = mock.MagicMock() ports = { 'vip': {'ip_address': '1.1.1.6', 'subnet_id': 'vip_subnet'}, 'node1_lan1': {'ip_address': '1.1.1.4'}, 'node1_ha': {'ip_address': '1.1.1.2'}, 'node2_lan1': {'ip_address': '1.1.1.5'}, 'node2_ha': {'ip_address': '1.1.1.3'}, } get_first_ip.side_effect = create_side_effect(ports) self.my_ha_pair._get_first_ip = get_first_ip self.my_ha_pair.node = mock.MagicMock() self.my_ha_pair.wait_for_https = mock.MagicMock() show_subnet = mock.MagicMock() show_subnet.return_value = {'subnet': {'cidr': '1.1.1.0/24', 'gateway_ip': '1.1.1.1'}} neutron = mock.MagicMock() neutron.show_subnet = show_subnet self.my_ha_pair.client = mock.MagicMock(return_value=neutron) return (props, neutron, ports) def test_handle_create(self): (props, neutron, ports) = self.prepair_ha_pair() with mock.patch('heat_infoblox.resources.grid_member.' 
'resource_utils.fix_ha_ports_mac') as fix_ha_ports: # Call 'handle_create' method self.my_ha_pair.handle_create() fix_ha_ports.assert_called_once_with( neutron, {'ipv4': {'address': ports['vip']['ip_address']}}, props['virtual_router_id'], True, (props['node1_ha'], props['node2_ha'])) # Check calls self.assertEqual( [mock.call('vip'), mock.call('node1_ha'), mock.call('node2_ha'), mock.call('node1_lan1'), mock.call('node2_lan1')], self.my_ha_pair._get_first_ip.mock_calls) self.assertEqual( [mock.call('NODE1_FLOATING_IP'), mock.call('VIP_FLOATING_IP'), mock.call('NODE2_FLOATING_IP')], self.my_ha_pair.wait_for_https.mock_calls) self.assertEqual( [mock.call('NODE1_FLOATING_IP', 'admin', 'infoblox'), mock.call().update_member( 'infoblox.localdomain', {'enable_ha': True, 'router_id': 123, 'node_info': [ {'lan_ha_port_setting': {'mgmt_lan': '1.1.1.4', 'ha_ip_address': '1.1.1.2'}}, {'lan_ha_port_setting': {'mgmt_lan': '1.1.1.5', 'ha_ip_address': '1.1.1.3'}}], 'vip_setting': {'subnet_mask': '255.255.255.0', 'gateway': '1.1.1.1', 'address': '1.1.1.6'} }), mock.call('NODE2_FLOATING_IP', 'admin', 'infoblox'), mock.call().join_grid('Infoblox', '1.1.1.6', 'test') ], self.my_ha_pair.node.mock_calls) def test_update_allowed_address_pairs(self): # Prepair member with update_allowed_address_pairs set to False (props, neutron, ports) = self.prepair_ha_pair( update_ports=False) # Call 'handle_create' method and check that fix_ha_ports not called with mock.patch('heat_infoblox.resources.grid_member.' 'resource_utils.fix_ha_ports_mac') as fix_ha_ports: self.my_ha_pair.handle_create() fix_ha_ports.assert_not_called()<|fim▁end|>
'vip': 'VIP', 'node1_ha': 'NODE1_HA', 'node2_ha': 'NODE2_HA', 'node1_lan1': 'NODE1_LAN1',
<|file_name|>MavenRootModelAdapter.java<|end_file_name|><|fim▁begin|>/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.importing; import com.intellij.openapi.externalSystem.service.project.IdeModifiableModelsProvider; import com.intellij.openapi.module.Module; import com.intellij.openapi.roots.*; import com.intellij.openapi.roots.libraries.Library; import com.intellij.openapi.util.io.FileUtil; import com.intellij.pom.java.LanguageLevel; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.idea.maven.model.MavenArtifact; import org.jetbrains.idea.maven.project.MavenProject; import org.jetbrains.idea.maven.utils.Path; import org.jetbrains.jps.model.JpsElement; import org.jetbrains.jps.model.java.JavaSourceRootType; import org.jetbrains.jps.model.module.JpsModuleSourceRootType; import java.io.File; public class MavenRootModelAdapter implements MavenRootModelAdapterInterface { private final MavenRootModelAdapterInterface myDelegate; public MavenRootModelAdapter(MavenRootModelAdapterInterface delegate) {myDelegate = delegate;} @Override public void init(boolean isNewlyCreatedModule) { myDelegate.init(isNewlyCreatedModule); } @Override public ModifiableRootModel getRootModel() { return myDelegate.getRootModel(); } @Override public String @NotNull [] getSourceRootUrls(boolean includingTests) { return myDelegate.getSourceRootUrls(includingTests); } @Override 
public Module getModule() { return myDelegate.getModule(); } @Override public void clearSourceFolders() { myDelegate.clearSourceFolders(); } @Override public <P extends JpsElement> void addSourceFolder(String path, JpsModuleSourceRootType<P> rootType) { myDelegate.addSourceFolder(path, rootType); } @Override public void addGeneratedJavaSourceFolder(String path, JavaSourceRootType rootType, boolean ifNotEmpty) { myDelegate.addGeneratedJavaSourceFolder(path, rootType, ifNotEmpty); } @Override public void addGeneratedJavaSourceFolder(String path, JavaSourceRootType rootType) { myDelegate.addGeneratedJavaSourceFolder(path, rootType); } @Override public boolean hasRegisteredSourceSubfolder(@NotNull File f) { return myDelegate.hasRegisteredSourceSubfolder(f); } @Override @Nullable public SourceFolder getSourceFolder(File folder) { return myDelegate.getSourceFolder(folder); } @Override public boolean isAlreadyExcluded(File f) { return myDelegate.isAlreadyExcluded(f); } @Override public void addExcludedFolder(String path) { myDelegate.addExcludedFolder(path); } @Override public void unregisterAll(String path, boolean under, boolean unregisterSources) { myDelegate.unregisterAll(path, under, unregisterSources); } @Override public boolean hasCollision(String sourceRootPath) { return myDelegate.hasCollision(sourceRootPath); } @Override public void useModuleOutput(String production, String test) { myDelegate.useModuleOutput(production, test); } @Override public Path toPath(String path) { return myDelegate.toPath(path); } @Override public void addModuleDependency(@NotNull String moduleName, @NotNull DependencyScope scope, boolean testJar) { myDelegate.addModuleDependency(moduleName, scope, testJar); } @Override @Nullable public Module findModuleByName(String moduleName) { return myDelegate.findModuleByName(moduleName); } @Override public void addSystemDependency(MavenArtifact artifact, DependencyScope scope) { myDelegate.addSystemDependency(artifact, scope); } @Override public 
LibraryOrderEntry addLibraryDependency(MavenArtifact artifact, DependencyScope scope, IdeModifiableModelsProvider provider, MavenProject project) { return myDelegate.addLibraryDependency(artifact, scope, provider, project); } @Override public Library findLibrary(@NotNull MavenArtifact artifact) { return myDelegate.findLibrary(artifact); } @Override public void setLanguageLevel(LanguageLevel level) { myDelegate.setLanguageLevel(level); } static boolean isChangedByUser(Library library) { String[] classRoots = library.getUrls( OrderRootType.CLASSES); if (classRoots.length != 1) return true; String classes = classRoots[0]; if (!classes.endsWith("!/")) return true; int dotPos = classes.lastIndexOf("/", classes.length() - 2 /* trim ending !/ */); if (dotPos == -1) return true; String pathToJar = classes.substring(0, dotPos); if (MavenRootModelAdapter .hasUserPaths(OrderRootType.SOURCES, library, pathToJar)) { return true; } if (MavenRootModelAdapter .hasUserPaths( JavadocOrderRootType .getInstance(), library, pathToJar)) { return true; } return false; } private static boolean hasUserPaths(OrderRootType rootType, Library library, String pathToJar) { String[] sources = library.getUrls(rootType); for (String each : sources) { if (!FileUtil.startsWith(each, pathToJar)) return true; } return false; } public static boolean isMavenLibrary(@Nullable Library library) { return library != null && MavenArtifact.isMavenLibrary(library.getName()); } public static ProjectModelExternalSource getMavenExternalSource() { return ExternalProjectSystemRegistry.getInstance().getSourceById(ExternalProjectSystemRegistry.MAVEN_EXTERNAL_SOURCE_ID); } @Nullable public static OrderEntry findLibraryEntry(@NotNull Module m, @NotNull MavenArtifact artifact) { String name = artifact.getLibraryName(); for (OrderEntry each : ModuleRootManager.getInstance(m).getOrderEntries()) { if (each instanceof LibraryOrderEntry && name.equals(((LibraryOrderEntry)each).getLibraryName())) { return each; } } return 
null;<|fim▁hole|> public static MavenArtifact findArtifact(@NotNull MavenProject project, @Nullable Library library) { if (library == null) return null; String name = library.getName(); if (!MavenArtifact.isMavenLibrary(name)) return null; for (MavenArtifact each : project.getDependencies()) { if (each.getLibraryName().equals(name)) return each; } return null; } }<|fim▁end|>
} @Nullable
<|file_name|>conditional.js<|end_file_name|><|fim▁begin|>function* f1() { a = (yield) ? 1 : 1; a = yield 1 ? 1 : 1; a = (yield 1) ? 1 : 1; a = 1 ? yield : yield; a = 1 ? yield 1 : yield 1; } function* f2() { a = yield* 1 ? 1 : 1; a = (yield* 1) ? 1 : 1;<|fim▁hole|>} async function f3() { a = await 1 ? 1 : 1; a = (await 1) ? 1 : 1; a = 1 ? await 1 : await 1; }<|fim▁end|>
a = 1 ? yield* 1 : yield* 1;