prompt: large_string, lengths 70 to 991k
completion: large_string, lengths 0 to 1.02k
<|file_name|>test_rowcount.py<|end_file_name|><|fim▁begin|>from sqlalchemy import * from sqlalchemy.test import * class FoundRowsTest(TestBase, AssertsExecutionResults): """tests rowcount functionality""" __requires__ = ('sane_rowcount', ) @classmethod def setup_class(cls): global employees_table, metadata metadata = MetaData(testing.db) employees_table = Table('employees', metadata, Column('employee_id', Integer, Sequence('employee_id_seq', optional=True), primary_key=True), Column('name', String(50)), Column('department', String(1)), ) metadata.create_all() def setup(self): global data data = [ ('Angela', 'A'), ('Andrew', 'A'), ('Anand', 'A'), ('Bob', 'B'), ('Bobette', 'B'), ('Buffy', 'B'), ('Charlie', 'C'), ('Cynthia', 'C'), ('Chris', 'C') ] i = employees_table.insert() i.execute(*[{'name':n, 'department':d} for n, d in data]) def teardown(self): employees_table.delete().execute() @classmethod def teardown_class(cls): metadata.drop_all() def testbasic(self): s = employees_table.select()<|fim▁hole|> assert len(r) == len(data) def test_update_rowcount1(self): # WHERE matches 3, 3 rows changed department = employees_table.c.department r = employees_table.update(department=='C').execute(department='Z') print "expecting 3, dialect reports %s" % r.rowcount assert r.rowcount == 3 def test_update_rowcount2(self): # WHERE matches 3, 0 rows changed department = employees_table.c.department r = employees_table.update(department=='C').execute(department='C') print "expecting 3, dialect reports %s" % r.rowcount assert r.rowcount == 3 def test_delete_rowcount(self): # WHERE matches 3, 3 rows deleted department = employees_table.c.department r = employees_table.delete(department=='C').execute() print "expecting 3, dialect reports %s" % r.rowcount assert r.rowcount == 3<|fim▁end|>
r = s.execute().fetchall()
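The row above exercises DBAPI rowcount reporting through SQLAlchemy's legacy API (sqlalchemy.test, Python 2 print statements). A minimal standalone sketch of the same update-rowcount check against the modern 1.4+ API, using an in-memory SQLite engine; the engine, table, and data below are illustrative, not taken from the row:

from sqlalchemy import (create_engine, MetaData, Table, Column,
                        Integer, String, insert, update)

engine = create_engine("sqlite://")
metadata = MetaData()
employees = Table(
    "employees", metadata,
    Column("employee_id", Integer, primary_key=True),
    Column("name", String(50)),
    Column("department", String(1)),
)
metadata.create_all(engine)

with engine.begin() as conn:
    conn.execute(insert(employees), [
        {"name": "Charlie", "department": "C"},
        {"name": "Cynthia", "department": "C"},
        {"name": "Chris", "department": "C"},
    ])
    result = conn.execute(
        update(employees)
        .where(employees.c.department == "C")
        .values(department="Z")
    )
    assert result.rowcount == 3  # WHERE matched 3 rows, 3 rows changed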
<|file_name|>defaults.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.<|fim▁hole|> http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1 import ( "k8s.io/kubernetes/pkg/runtime" ) func addDefaultingFuncs(scheme *runtime.Scheme) { scheme.AddDefaultingFuncs( SetDefaults_Job, ) } func SetDefaults_Job(obj *Job) { // For a non-parallel job, you can leave both `.spec.completions` and // `.spec.parallelism` unset. When both are unset, both are defaulted to 1. if obj.Spec.Completions == nil && obj.Spec.Parallelism == nil { obj.Spec.Completions = new(int32) *obj.Spec.Completions = 1 obj.Spec.Parallelism = new(int32) *obj.Spec.Parallelism = 1 } if obj.Spec.Parallelism == nil { obj.Spec.Parallelism = new(int32) *obj.Spec.Parallelism = 1 } }<|fim▁end|>
You may obtain a copy of the License at
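The completion above fills in a line of the Apache license header; the file body itself defines Kubernetes Job defaulting. A hedged restatement of that defaulting rule in plain Python, purely to illustrate the logic (the function below is not part of kubernetes):

def set_defaults_job(completions, parallelism):
    # When neither field is set, both default to 1.
    if completions is None and parallelism is None:
        completions, parallelism = 1, 1
    # An unset parallelism always defaults to 1.
    if parallelism is None:
        parallelism = 1
    return completions, parallelism

assert set_defaults_job(None, None) == (1, 1)
assert set_defaults_job(5, None) == (5, 1)
assert set_defaults_job(None, 3) == (None, 3)  # completions stays unset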
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models from .managers import CRUDManager, CRUDException class CRUDFilterModel(models.Model): class Meta: abstract = True @classmethod def verify_user_has_role(cls, user, role, request): """ Call user-defined auth function to determine if this user can use this role. """ if role in ['anonymous', 'authenticated']: return True elif role == "admin": return user.is_superuser if CRUDManager.auth_function is None: raise CRUDException("You must define an auth_function for CRUDManagerMixin", 500) try: value = CRUDManager.auth_function(role, user, request) except Exception as exc: raise CRUDException("Your auth_function in CRUDManager threw an exception: " + str(exc), 500) if not value: raise CRUDException("This user is not authorized to use this role", 403) return True @classmethod def role_can_perform_operation_with_filter(cls, role, operation, filter_str): """ For this class, make sure this role can perform this operation (with this filter) """ # print("Check cls ", str(cls), " role ", role, " operation ", operation, " filter_str ", filter_str) if operation.upper() not in ['C', 'R', 'U', 'D']: raise CRUDException("Operation must be one of: 'C', 'R', 'U', 'D'", 500) try: filters = CRUDManager.get_filter_set_for_model(cls)['allowed_methods'][role] except KeyError: # Users that are simply authenticated are not allowed: # DUBIOUS LOGIC -- return this if filters is {'__default': None} and role is authenticated. anonymous = 401 if role == "authenticated": raise CRUDException("You must specify a role for this endpoint in the ROLE header", 400) # Invalid role: else: raise CRUDException(role + " is not a valid role", 400) try: allowed_methods = filters[filter_str] except KeyError: # print(filter_str, " not a valid filter for cls ", str(cls), ", role ", role, " -- ", filters) raise CRUDException(filter_str + " is not a valid filter here", 400) # print("Role: ", role, ", allowed_methods: ", str(allowed_methods), " operation: ", operation) if allowed_methods is not None and operation.upper() in [method.upper() for method in allowed_methods]: return True else: return False<|fim▁hole|> @classmethod def __get_objects(cls, user, role, operation, filters=['__default'], request=None): """ Return queryset that this user/role has access to (given these filters) """ # UNSAFE to call this function from outside of the "get_queryset_or_false" function. # If this is not an abstract class, start with all objects, and filter down. if hasattr(cls, 'objects'): object_set = cls.objects.all() else: object_set = [] try: for filter_str in filters: # print("__get_objects with role ", role, " operation ", operation, " filter ", filter_str, " func: ", str(CRUDManager.get_filter_set_for_model(cls)['filter'][role][filter_str])) object_set = CRUDManager.get_filter_set_for_model(cls)['filter'][role][filter_str](object_set, user, request) except CRUDException: # Elevate CRUDExceptions to be caught by middleware raise except Exception: raise CRUDException("Error calling filter functions. 
Please see the 'QuerySet vs Manager Methods' section of the documentation.", 400) return object_set @classmethod def check_for_permissions(cls, user, role, operation, request, filters=['__default']): """ Make sure this role can perform this operation """ cls.verify_user_has_role(user, role, request) for filter_str in filters: if not cls.role_can_perform_operation_with_filter(role, operation, filter_str): raise CRUDException("Cannot perform this operation with this role.", status_code=403) @classmethod def get_queryset_or_false(cls, user, role, operation, filters=['__default'], request=None, _id=-1, lookup_field='pk'): """ Return queryset (and make sure this item is in the queryset) """ # Redundant? cls.check_for_permissions(user, role, operation, request, filters) # Get our objects: object_set = cls.__get_objects(user, role, operation, filters, request) # If this is a single-object operation, we have to have a valid ID if operation.upper() in ['U', 'D']: if _id == -1: raise CRUDException("ID must be specified for Update and Delete", 400) else: kwargs = {'{0}'.format(lookup_field): _id} if object_set.filter(**kwargs).count() == 0: # It's possible that the object just doesn't exist... but we'll return a 403 to obfuscate raise CRUDException("Cannot perform this operation on this object.", status_code=403) # Later, we can start to perform different operations here: # if operation == 'R': # return object_set # elif operation == "C": # .... return object_set<|fim▁end|>
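The model above refuses to run unless an auth_function is registered on CRUDManager. A hedged wiring sketch, assuming Django is configured and using a made-up stub user and role (nothing below comes from the file except CRUDManager, CRUDException, and CRUDFilterModel):

class StubUser:
    is_superuser = False
    roles = ("editor",)

def auth_function(role, user, request):
    # Return truthy if this user may act under this role.
    return role in getattr(user, "roles", ())

CRUDManager.auth_function = auth_function

# verify_user_has_role returns True, or raises CRUDException with a 403:
CRUDFilterModel.verify_user_has_role(StubUser(), "editor", request=None)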
<|file_name|>creation.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals import wx import widgetUtils class audio_album(widgetUtils.BaseDialog): def __init__(self, *args, **kwargs): super(audio_album, self).__init__(title=_("Create a new album"), parent=None)<|fim▁hole|> panel = wx.Panel(self) sizer = wx.BoxSizer(wx.VERTICAL) lbl = wx.StaticText(panel, wx.NewId(), _("Album title")) self.title = wx.TextCtrl(panel, wx.NewId()) box = wx.BoxSizer(wx.HORIZONTAL) box.Add(lbl, 1, wx.ALL, 5) box.Add(self.title, 1, wx.ALL, 5) sizer.Add(box, 1, wx.ALL, 5) ok = wx.Button(panel, wx.ID_OK, _("&OK")) ok.SetDefault() cancel = wx.Button(panel, wx.ID_CANCEL, _("&Close")) btnsizer = wx.BoxSizer() btnsizer.Add(ok, 0, wx.ALL, 5) btnsizer.Add(cancel, 0, wx.ALL, 5) sizer.Add(btnsizer, 0, wx.ALL, 5) panel.SetSizer(sizer) self.SetClientSize(sizer.CalcMin())<|fim▁end|>
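The dialog above is built once and shown modally by its caller. A hedged driver, assuming widgetUtils.BaseDialog wraps wx.Dialog and that a gettext-style _() is installed before the class body runs:

import gettext
import wx

gettext.install("app")      # provides the _() the dialog calls
app = wx.App()
dlg = audio_album()         # assumes BaseDialog forwards to wx.Dialog
if dlg.ShowModal() == wx.ID_OK:
    print(dlg.title.GetValue())   # the album title the user typed
dlg.Destroy()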
<|file_name|>mix_wav.py<|end_file_name|><|fim▁begin|>__author__ = 'ray' import wave import numpy as np wav_1_path = "origin.wav" wav_2_path = "clap.wav" wav_out_path = "mixed.wav" wav_1 = wave.open(wav_1_path, 'rb') wav_2 = wave.open(wav_2_path, 'rb') wav_out = wave.open(wav_out_path, 'wb') len_1 = wav_1.getnframes() len_2 = wav_2.getnframes() if len_1>len_2: wav_out.setparams(wav_1.getparams()) else:<|fim▁hole|>signal_1 = np.fromstring(wav_1.readframes(-1), 'Int16') signal_2 = np.fromstring(wav_2.readframes(-1), 'Int16') if len_1>len_2: signal_out = np.append(signal_1[:len_2]+signal_2, signal_1[len_2:]).tostring() elif len_2>len_1: signal_out = np.append(signal_1+signal_2[:len_1], signal_2[len_1:]).tostring() else: signal_out = (signal_1+signal_2).tostring() wav_out.writeframes(signal_out) wav_1.close() wav_2.close() wav_out.close() print 'done!'<|fim▁end|>
wav_out.setparams(wav_2.getparams())
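Summing two Int16 signals directly, as the script above does, can overflow and wrap. A hedged variant that widens to int32 before clipping back into range (numpy only; equal-length mono int16 arrays assumed):

import numpy as np

def mix_int16(a, b):
    # Widen, sum, clip to the int16 range, then narrow again.
    mixed = a.astype(np.int32) + b.astype(np.int32)
    return np.clip(mixed, -32768, 32767).astype(np.int16)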
<|file_name|>func.go<|end_file_name|><|fim▁begin|>package main // #include <stdlib.h> // #include <locale.h> import "C" import ( "os" "path/filepath" "runtime" "unsafe" ) const LC_NUMERIC = int(C.LC_NUMERIC) // setLocale sets locale func setLocale(lc int, locale string) {<|fim▁hole|> defer C.free(unsafe.Pointer(l)) C.setlocale(C.int(lc), l) } // inSlice checks if string is in slice func inSlice(a string, b []string) bool { for _, i := range b { if a == i { return true } } return false } // homeDir returns user home directory func homeDir() string { if runtime.GOOS == "windows" { home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") if home == "" { home = os.Getenv("USERPROFILE") } return home } return os.Getenv("HOME") } // cacheDir returns cache directory func cacheDir() string { dir := os.Getenv("XDG_CACHE_HOME") if dir == "" { dir = filepath.Join(homeDir(), ".cache", "bukanir") } else { dir = filepath.Join(dir, "bukanir") } return dir }<|fim▁end|>
l := C.CString(locale)
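The cgo wrapper above is a thin shim over C setlocale(3). Python exposes the same call through its locale module, so a hedged equivalent of setLocale(LC_NUMERIC, ...) looks like this (the locale name is an example and may not be installed on every host):

import locale

# Raises locale.Error if the named locale is missing on this system.
locale.setlocale(locale.LC_NUMERIC, "de_DE.UTF-8")
print(locale.localeconv()["decimal_point"])  # ',' under a German numeric locale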
<|file_name|>render.rs<|end_file_name|><|fim▁begin|>use glfw_ffi::*; use nanovg; use std::os::raw::c_int; use std::ptr; #[repr(usize)] #[derive(PartialEq, Eq)] pub enum Fonts { Inter = 0, Vga8, Moderno, NumFonts, } pub struct RenderContext<'a> { window: *mut GLFWwindow, nvg: &'a nanovg::Context, fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize], } impl<'a> RenderContext<'a> { pub fn new( window: *mut GLFWwindow, nvg: &'a nanovg::Context, fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize], ) -> Self { Self { window, nvg, fonts } } pub fn size(&self) -> (f32, f32) { let (mut w, mut h) = (0i32, 0i32); unsafe { glfwGetWindowSize(self.window, &mut w as *mut _, &mut h as *mut _); } (w as f32, h as f32) } pub fn pixel_ratio(&self) -> f32 { unsafe { let mut fb_width: c_int = 0; let mut win_width: c_int = 0; glfwGetFramebufferSize(self.window, &mut fb_width as *mut _, ptr::null_mut()); glfwGetWindowSize(self.window, &mut win_width as *mut _, ptr::null_mut()); fb_width as f32 / win_width as f32 } } pub fn frame<F: FnOnce(nanovg::Frame)>(&self, f: F) { self.nvg.frame(self.size(), self.pixel_ratio(), f); } pub fn font(&self, id: Fonts) -> nanovg::Font<'a> { if id == Fonts::NumFonts {<|fim▁hole|> panic!("Tried to access font `Fonts::NumFonts`"); } self.fonts[id as usize] } }<|fim▁end|>
<|file_name|>barrier.rs<|end_file_name|><|fim▁begin|>#[cfg(test)] mod tests; use crate::fmt; use crate::sync::{Condvar, Mutex}; /// A barrier enables multiple threads to synchronize the beginning /// of some computation. /// /// # Examples /// /// ``` /// use std::sync::{Arc, Barrier}; /// use std::thread; /// /// let mut handles = Vec::with_capacity(10); /// let barrier = Arc::new(Barrier::new(10)); /// for _ in 0..10 { /// let c = Arc::clone(&barrier); /// // The same messages will be printed together. /// // You will NOT see any interleaving. /// handles.push(thread::spawn(move|| { /// println!("before wait"); /// c.wait(); /// println!("after wait"); /// })); /// } /// // Wait for other threads to finish. /// for handle in handles { /// handle.join().unwrap(); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub struct Barrier { lock: Mutex<BarrierState>, cvar: Condvar, num_threads: usize, } // The inner state of a double barrier struct BarrierState { count: usize, generation_id: usize, } /// A `BarrierWaitResult` is returned by [`Barrier::wait()`] when all threads /// in the [`Barrier`] have rendezvoused. /// /// # Examples /// /// ``` /// use std::sync::Barrier; /// /// let barrier = Barrier::new(1); /// let barrier_wait_result = barrier.wait(); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub struct BarrierWaitResult(bool); #[stable(feature = "std_debug", since = "1.16.0")] impl fmt::Debug for Barrier { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Barrier").finish_non_exhaustive() } } impl Barrier { /// Creates a new barrier that can block a given number of threads. /// /// A barrier will block `n`-1 threads which call [`wait()`] and then wake /// up all threads at once when the `n`th thread calls [`wait()`]. /// /// [`wait()`]: Barrier::wait /// /// # Examples /// /// ``` /// use std::sync::Barrier; /// /// let barrier = Barrier::new(10); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[must_use] pub fn new(n: usize) -> Barrier { Barrier { lock: Mutex::new(BarrierState { count: 0, generation_id: 0 }), cvar: Condvar::new(), num_threads: n,<|fim▁hole|> /// Blocks the current thread until all threads have rendezvoused here. /// /// Barriers are re-usable after all threads have rendezvoused once, and can /// be used continuously. /// /// A single (arbitrary) thread will receive a [`BarrierWaitResult`] that /// returns `true` from [`BarrierWaitResult::is_leader()`] when returning /// from this function, and all other threads will receive a result that /// will return `false` from [`BarrierWaitResult::is_leader()`]. /// /// # Examples /// /// ``` /// use std::sync::{Arc, Barrier}; /// use std::thread; /// /// let mut handles = Vec::with_capacity(10); /// let barrier = Arc::new(Barrier::new(10)); /// for _ in 0..10 { /// let c = Arc::clone(&barrier); /// // The same messages will be printed together. /// // You will NOT see any interleaving. /// handles.push(thread::spawn(move|| { /// println!("before wait"); /// c.wait(); /// println!("after wait"); /// })); /// } /// // Wait for other threads to finish. /// for handle in handles { /// handle.join().unwrap(); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn wait(&self) -> BarrierWaitResult { let mut lock = self.lock.lock().unwrap(); let local_gen = lock.generation_id; lock.count += 1; if lock.count < self.num_threads { // We need a while loop to guard against spurious wakeups. 
// https://en.wikipedia.org/wiki/Spurious_wakeup while local_gen == lock.generation_id { lock = self.cvar.wait(lock).unwrap(); } BarrierWaitResult(false) } else { lock.count = 0; lock.generation_id = lock.generation_id.wrapping_add(1); self.cvar.notify_all(); BarrierWaitResult(true) } } } #[stable(feature = "std_debug", since = "1.16.0")] impl fmt::Debug for BarrierWaitResult { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("BarrierWaitResult").field("is_leader", &self.is_leader()).finish() } } impl BarrierWaitResult { /// Returns `true` if this thread is the "leader thread" for the call to /// [`Barrier::wait()`]. /// /// Only one thread will have `true` returned from their result, all other /// threads will have `false` returned. /// /// # Examples /// /// ``` /// use std::sync::Barrier; /// /// let barrier = Barrier::new(1); /// let barrier_wait_result = barrier.wait(); /// println!("{:?}", barrier_wait_result.is_leader()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[must_use] pub fn is_leader(&self) -> bool { self.0 } }<|fim▁end|>
} }
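Python's threading.Barrier mirrors the Rust Barrier documented above, including the leader concept: wait() returns an index per generation, and exactly one thread receives 0. A hedged port of the 10-thread example from the doc comments:

import threading

barrier = threading.Barrier(10)

def worker():
    print("before wait")
    index = barrier.wait()   # exactly one thread per generation gets 0
    print("after wait, leader =", index == 0)

threads = [threading.Thread(target=worker) for _ in range(10)]
for t in threads:
    t.start()
for t in threads:
    t.join()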
<|file_name|>test_smoke.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import io import os import unittest import logging import uuid from mediafire import MediaFireApi, MediaFireUploader, UploadSession from mediafire.uploader import UPLOAD_SIMPLE_LIMIT_BYTES<|fim▁hole|>APP_ID = '42511' MEDIAFIRE_EMAIL = os.environ.get('MEDIAFIRE_EMAIL') MEDIAFIRE_PASSWORD = os.environ.get('MEDIAFIRE_PASSWORD') class MediaFireSmokeBaseTestCase(object): """Smoke tests for API""" class BaseTest(unittest.TestCase): def setUp(self): # Reset logging to info to avoid leaking credentials logger = logging.getLogger('mediafire.api') logger.setLevel(logging.INFO) self.api = MediaFireApi() session = self.api.user_get_session_token( app_id=APP_ID, email=MEDIAFIRE_EMAIL, password=MEDIAFIRE_PASSWORD) self.api.session = session @unittest.skipIf('CI' not in os.environ, "Running outside CI environment") class MediaFireSmokeSimpleTest(MediaFireSmokeBaseTestCase.BaseTest): """Simple tests""" def test_user_get_info(self): result = self.api.user_get_info() self.assertEqual(result["user_info"]["display_name"], u"Coalmine Smoketest") @unittest.skipIf('CI' not in os.environ, "Running outside CI environment") class MediaFireSmokeWithDirectoryTest(MediaFireSmokeBaseTestCase.BaseTest): """Smoke tests requiring temporary directory""" def setUp(self): super(MediaFireSmokeWithDirectoryTest, self).setUp() folder_uuid = str(uuid.uuid4()) result = self.api.folder_create(foldername=folder_uuid) self.folder_key = result["folder_key"] def tearDown(self): self.api.folder_purge(self.folder_key) def test_upload_small(self): """Test simple upload""" # make sure we most likely will get upload/simple data = b'This is a tiny file content: ' + os.urandom(32) fd = io.BytesIO(data) uploader = MediaFireUploader(self.api) with UploadSession(self.api): result = uploader.upload(fd, 'smallfile.txt', folder_key=self.folder_key) self.assertIsNotNone(result.quickkey) self.assertEqual(result.action, 'upload/simple') def test_upload_large(self): """Test large file upload""" # make sure we will get upload/resumable, prefix + 4MiB data = b'Long line is long: ' + os.urandom(UPLOAD_SIMPLE_LIMIT_BYTES) fd = io.BytesIO(data) uploader = MediaFireUploader(self.api) with UploadSession(self.api): result = uploader.upload(fd, 'bigfile.txt', folder_key=self.folder_key) self.assertIsNotNone(result.quickkey) self.assertEqual(result.action, 'upload/resumable') if __name__ == "__main__": unittest.main()<|fim▁end|>
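The two upload tests above pin down which endpoint the uploader is expected to pick around UPLOAD_SIMPLE_LIMIT_BYTES. A hedged restatement of that rule; only the threshold name comes from the import, the branch itself is an assumption inferred from the tests:

def expected_upload_action(size_bytes, limit=UPLOAD_SIMPLE_LIMIT_BYTES):
    # Small payloads go through upload/simple, larger ones upload/resumable.
    return "upload/simple" if size_bytes <= limit else "upload/resumable"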
<|file_name|>curve.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2015-2021 The btcsuite developers // Copyright (c) 2015-2021 The Decred developers package btcec import ( secp "github.com/decred/dcrd/dcrec/secp256k1/v4" ) // JacobianPoint is an element of the group formed by the secp256k1 curve in // Jacobian projective coordinates and thus represents a point on the curve. type JacobianPoint = secp.JacobianPoint // MakeJacobianPoint returns a Jacobian point with the provided X, Y, and Z // coordinates. func MakeJacobianPoint(x, y, z *FieldVal) JacobianPoint { return secp.MakeJacobianPoint(x, y, z) } // AddNonConst adds the passed Jacobian points together and stores the result // in the provided result param in *non-constant* time. func AddNonConst(p1, p2, result *JacobianPoint) { secp.AddNonConst(p1, p2, result) } // DecompressY attempts to calculate the Y coordinate for the given X // coordinate such that the result pair is a point on the secp256k1 curve. It // adjusts Y based on the desired oddness and returns whether or not it was // successful since not all X coordinates are valid. // // The magnitude of the provided X coordinate field val must be a max of 8 for // a correct result. The resulting Y field val will have a max magnitude of 2. func DecompressY(x *FieldVal, odd bool, resultY *FieldVal) bool { return secp.DecompressY(x, odd, resultY) } // DoubleNonConst doubles the passed Jacobian point and stores the result in // the provided result parameter in *non-constant* time.<|fim▁hole|>// result. The resulting point will be normalized. func DoubleNonConst(p, result *JacobianPoint) { secp.DoubleNonConst(p, result) } // ScalarBaseMultNonConst multiplies k*G where G is the base point of the group // and k is a big endian integer. The result is stored in Jacobian coordinates // (x1, y1, z1). // // NOTE: The resulting point will be normalized. func ScalarBaseMultNonConst(k *ModNScalar, result *JacobianPoint) { secp.ScalarBaseMultNonConst(k, result) } // ScalarMultNonConst multiplies k*P where k is a big endian integer modulo the // curve order and P is a point in Jacobian projective coordinates and stores // the result in the provided Jacobian point. // // NOTE: The point must be normalized for this function to return the correct // result. The resulting point will be normalized. func ScalarMultNonConst(k *ModNScalar, point, result *JacobianPoint) { secp.ScalarMultNonConst(k, point, result) }<|fim▁end|>
// // NOTE: The point must be normalized for this function to return the correct
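ScalarBaseMultNonConst in the file above computes k*G on secp256k1. A hedged pure-Python sketch of the same operation in affine rather than Jacobian coordinates, using the standard curve constants (requires Python 3.8+ for pow(x, -1, p); this is a teaching sketch, not constant-time code):

P = 2**256 - 2**32 - 977
Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798
Gy = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8

def ec_add(p1, p2):
    # None represents the point at infinity.
    if p1 is None: return p2
    if p2 is None: return p1
    (x1, y1), (x2, y2) = p1, p2
    if x1 == x2 and (y1 + y2) % P == 0:
        return None
    if p1 == p2:
        m = (3 * x1 * x1) * pow(2 * y1, -1, P) % P   # tangent slope (a = 0)
    else:
        m = (y2 - y1) * pow(x2 - x1, -1, P) % P      # chord slope
    x3 = (m * m - x1 - x2) % P
    return (x3, (m * (x1 - x3) - y1) % P)

def scalar_base_mult(k):
    # Plain double-and-add; *non-constant* time, like the Go function's name says.
    result, addend = None, (Gx, Gy)
    while k:
        if k & 1:
            result = ec_add(result, addend)
        addend = ec_add(addend, addend)
        k >>= 1
    return result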
<|file_name|>unclosed-braces.rs<|end_file_name|><|fim▁begin|>struct S { x: [usize; 3], } fn foo() { { { println!("hi"); } } } fn main() { //~^ NOTE unclosed delimiter { {<|fim▁hole|> //~^ NOTE this delimiter might not be properly closed... foo(); } //~^ NOTE ...as it matches this but it has different indentation } //~ ERROR this file contains an unclosed delimiter<|fim▁end|>
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the Google name nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Abstract base class for Port classes. The Port classes encapsulate Port-specific (platform-specific) behavior in the web test infrastructure. """ import time import collections import json import logging import optparse import os import re import sys import tempfile from collections import defaultdict import six from six.moves import zip_longest from blinkpy.common import exit_codes from blinkpy.common import find_files from blinkpy.common import read_checksum_from_png from blinkpy.common import path_finder from blinkpy.common.memoized import memoized from blinkpy.common.system.executive import ScriptError from blinkpy.common.system.path import abspath_to_uri from blinkpy.w3c.wpt_manifest import WPTManifest, MANIFEST_NAME from blinkpy.web_tests.layout_package.bot_test_expectations import BotTestExpectationsFactory from blinkpy.web_tests.models.test_configuration import TestConfiguration from blinkpy.web_tests.models.test_run_results import TestRunException from blinkpy.web_tests.models.typ_types import TestExpectations, ResultType from blinkpy.web_tests.port import driver from blinkpy.web_tests.port import server_process from blinkpy.web_tests.port.factory import PortFactory from blinkpy.web_tests.servers import apache_http from blinkpy.web_tests.servers import pywebsocket from blinkpy.web_tests.servers import wptserve _log = logging.getLogger(__name__) # Path relative to the build directory. 
CONTENT_SHELL_FONTS_DIR = "test_fonts" FONT_FILES = [ [[CONTENT_SHELL_FONTS_DIR], 'Ahem.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Arimo-Bold.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Arimo-BoldItalic.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Arimo-Italic.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Arimo-Regular.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Cousine-Bold.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Cousine-BoldItalic.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Cousine-Italic.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Cousine-Regular.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'DejaVuSans.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'GardinerModBug.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'GardinerModCat.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Garuda.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Gelasio-Bold.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Gelasio-BoldItalic.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Gelasio-Italic.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Gelasio-Regular.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Lohit-Devanagari.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Lohit-Gurmukhi.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Lohit-Tamil.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'MuktiNarrow.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'NotoColorEmoji.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'NotoSansCJKjp-Regular.otf', None], [[CONTENT_SHELL_FONTS_DIR], 'NotoSansKhmer-Regular.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'NotoSansSymbols2-Regular.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'NotoSansTibetan-Regular.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Tinos-Bold.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Tinos-BoldItalic.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Tinos-Italic.ttf', None], [[CONTENT_SHELL_FONTS_DIR], 'Tinos-Regular.ttf', None], ] # This is the fingerprint of wpt's certificate found in # blinkpy/third_party/wpt/certs. The following line is updated by # update_cert.py. WPT_FINGERPRINT = 'Nxvaj3+bY3oVrTc+Jp7m3E3sB1n3lXtnMDCyBsqEXiY=' # One for 127.0.0.1.sxg.pem SXG_FINGERPRINT = '55qC1nKu2A88ESbFmk5sTPQS/ScG+8DD7P+2bgFA9iM=' # And one for external/wpt/signed-exchange/resources/127.0.0.1.sxg.pem SXG_WPT_FINGERPRINT = '0Rt4mT6SJXojEMHTnKnlJ/hBKMBcI4kteBlhR1eTTdk=' # A convervative rule for names that are valid for file or directory names. VALID_FILE_NAME_REGEX = re.compile(r'^[\w\-=]+$') # This sub directory will be inside the results directory and it will # contain all the disc artifacts created by web tests ARTIFACTS_SUB_DIR = 'layout-test-results' class Port(object): """Abstract class for Port-specific hooks for the web_test package.""" # Subclasses override this. This should indicate the basic implementation # part of the port name, e.g., 'mac', 'win', 'gtk'; there is one unique # value per class. # FIXME: Rename this to avoid confusion with the "full port name". port_name = None # Test paths use forward slash as separator on all platforms. TEST_PATH_SEPARATOR = '/' ALL_BUILD_TYPES = ('debug', 'release') CONTENT_SHELL_NAME = 'content_shell' # Update the first line in third_party/blink/web_tests/TestExpectations and # the documentation in docs/testing/web_test_expectations.md when this list # changes. 
ALL_SYSTEMS = ( ('mac10.12', 'x86'), ('mac10.13', 'x86'), ('mac10.14', 'x86'), ('mac10.15', 'x86'), ('mac11', 'x86'), ('mac11-arm64', 'arm64'), ('win7', 'x86'), ('win10.20h2', 'x86'), ('trusty', 'x86_64'), ('fuchsia', 'x86_64'), ) CONFIGURATION_SPECIFIER_MACROS = { 'mac': [ 'mac10.12', 'mac10.13', 'mac10.14', 'mac10.15', 'mac11', 'mac11-arm64' ], 'win': ['win7', 'win10.20h2'], 'linux': ['trusty'], 'fuchsia': ['fuchsia'], } # List of ports open on the host that the tests will connect to. When tests # run on a separate machine (Android and Fuchsia) these ports need to be # forwarded back to the host. # 8000, 8080 and 8443 are for http/https tests; # 8880 is for websocket tests (see apache_http.py and pywebsocket.py). # 8001, 8081, 8444, and 8445 are for http/https WPT; # 9001 and 9444 are for websocket WPT (see wptserve.py). SERVER_PORTS = [8000, 8001, 8080, 8081, 8443, 8444, 8445, 8880, 9001, 9444] FALLBACK_PATHS = {} SUPPORTED_VERSIONS = [] # URL to the build requirements page. BUILD_REQUIREMENTS_URL = '' # The suffixes of baseline files (not extensions). BASELINE_SUFFIX = '-expected' BASELINE_MISMATCH_SUFFIX = '-expected-mismatch' # All of the non-reftest baseline extensions we use. BASELINE_EXTENSIONS = ('.wav', '.txt', '.png') FLAG_EXPECTATIONS_PREFIX = 'FlagExpectations' # The following is used for concetenating WebDriver test names. WEBDRIVER_SUBTEST_SEPARATOR = '>>' # The following is used for concetenating WebDriver test names in pytest format. WEBDRIVER_SUBTEST_PYTEST_SEPARATOR = '::' # The following two constants must match. When adding a new WPT root, also # remember to add an alias rule to //third_party/wpt_tools/wpt.config.json. # WPT_DIRS maps WPT roots on the file system to URL prefixes on wptserve. # The order matters: '/' MUST be the last URL prefix. WPT_DIRS = collections.OrderedDict([ ('wpt_internal', '/wpt_internal/'), ('external/wpt', '/'), ]) # WPT_REGEX captures: 1. the root directory of WPT relative to web_tests # (without a trailing slash), 2. the path of the test within WPT (without a # leading slash). WPT_REGEX = re.compile( r'^(?:virtual/[^/]+/)?(external/wpt|wpt_internal)/(.*)$') # Because this is an abstract base class, arguments to functions may be # unused in this class - pylint: disable=unused-argument @classmethod def latest_platform_fallback_path(cls): return cls.FALLBACK_PATHS[cls.SUPPORTED_VERSIONS[-1]] @classmethod def determine_full_port_name(cls, host, options, port_name): """Return a fully-specified port name that can be used to construct objects.""" # Subclasses will usually override this. assert port_name.startswith(cls.port_name) return port_name def __init__(self, host, port_name, options=None, **kwargs): # This value is the "full port name", and may be different from # cls.port_name by having version modifiers appended to it. self._name = port_name # These are default values that should be overridden in a subclasses. self._version = '' self._architecture = 'x86' # FIXME: Ideally we'd have a package-wide way to get a well-formed # options object that had all of the necessary options defined on it. self._options = options or optparse.Values() self.host = host self._executive = host.executive self._filesystem = host.filesystem self._path_finder = path_finder.PathFinder(host.filesystem) self._http_server = None self._websocket_server = None self._wpt_server = None self._image_differ = None self.server_process_constructor = server_process.ServerProcess # This can be overridden for testing. 
self._http_lock = None # FIXME: Why does this live on the port object? self._dump_reader = None if not hasattr(options, 'configuration') or not options.configuration: self.set_option_default('configuration', self.default_configuration()) if not hasattr(options, 'target') or not options.target: self.set_option_default('target', self._options.configuration) self._test_configuration = None self._results_directory = None self._virtual_test_suites = None self._used_expectation_files = None def __str__(self): return 'Port{name=%s, version=%s, architecture=%s, test_configuration=%s}' % ( self._name, self._version, self._architecture, self._test_configuration) def get_platform_tags(self): """Returns system condition tags that are used to find active expectations for a test run on a specific system""" return frozenset([ self._options.configuration.lower(), self._version, self.port_name, self._architecture ]) @memoized def flag_specific_config_name(self): """Returns the name of the flag-specific configuration which best matches self._specified_additional_driver_flags(), or the first specified flag with leading '-'s stripped if no match in the configuration is found. """ specified_flags = self._specified_additional_driver_flags() if not specified_flags: return None best_match = None configs = self._flag_specific_configs() for name in configs: # To match the specified flags must start with all config args. args = configs[name] if specified_flags[:len(args)] != args: continue # The first config matching the highest number of specified flags wins. if not best_match or len(configs[best_match]) < len(args): best_match = name if best_match: return best_match # If no match, fallback to the old mode: using the name of the first specified flag. return specified_flags[0].lstrip('-') @memoized def _flag_specific_configs(self): """Reads configuration from FlagSpecificConfig and returns a dictionary from name to args.""" config_file = self._filesystem.join(self.web_tests_dir(), 'FlagSpecificConfig') if not self._filesystem.exists(config_file): return {} try: json_configs = json.loads( self._filesystem.read_text_file(config_file)) except ValueError as error: raise ValueError('{} is not a valid JSON file: {}'.format( config_file, error)) configs = {} for config in json_configs: name = config['name'] args = config['args'] if not VALID_FILE_NAME_REGEX.match(name): raise ValueError( '{}: name "{}" contains invalid characters'.format( config_file, name)) if name in configs: raise ValueError('{} contains duplicated name {}.'.format( config_file, name)) if args in configs.values(): raise ValueError( '{}: name "{}" has the same args as another entry.'.format( config_file, name)) configs[name] = args return configs def _specified_additional_driver_flags(self): """Returns the list of additional driver flags specified by the user in the following ways, concatenated: 1. Flags in web_tests/additional-driver-flag.setting. 2. flags expanded from --flag-specific=<name> based on flag-specific config. 3. Zero or more flags passed by --additional-driver-flag. 
""" flags = [] flag_file = self._filesystem.join(self.web_tests_dir(), 'additional-driver-flag.setting') if self._filesystem.exists(flag_file): flags = self._filesystem.read_text_file(flag_file).split() flag_specific_option = self.get_option('flag_specific') if flag_specific_option: configs = self._flag_specific_configs() assert flag_specific_option in configs, '{} is not defined in FlagSpecificConfig'.format( flag_specific_option) flags += configs[flag_specific_option] flags += self.get_option('additional_driver_flag', []) return flags def additional_driver_flags(self): flags = self._specified_additional_driver_flags() if self.driver_name() == self.CONTENT_SHELL_NAME: flags += [ '--run-web-tests', '--ignore-certificate-errors-spki-list=' + WPT_FINGERPRINT + ',' + SXG_FINGERPRINT + ',' + SXG_WPT_FINGERPRINT, # Required for WebTransport tests. '--origin-to-force-quic-on=web-platform.test:11000', '--user-data-dir' ] if self.get_option('nocheck_sys_deps', False): flags.append('--disable-system-font-check') # If we're already repeating the tests more than once, then we're not # particularly concerned with speed. Resetting the shell between tests # increases test run time by 2-5X, but provides more consistent results # [less state leaks between tests]. if (self.get_option('reset_shell_between_tests') or (self.get_option('repeat_each') and self.get_option('repeat_each') > 1) or (self.get_option('iterations') and self.get_option('iterations') > 1)): flags += ['--reset-shell-between-tests'] return flags def supports_per_test_timeout(self): return False def default_smoke_test_only(self): return False def _default_timeout_ms(self): return 6000 def timeout_ms(self): timeout_ms = self._default_timeout_ms() if self.get_option('configuration') == 'Debug': # Debug is about 5x slower than Release. return 5 * timeout_ms if self._build_has_dcheck_always_on(): # Release with DCHECK is also slower than pure Release. return 2 * timeout_ms return timeout_ms @memoized def _build_has_dcheck_always_on(self): args_gn_file = self._build_path('args.gn') if not self._filesystem.exists(args_gn_file): _log.error('Unable to find %s', args_gn_file) return False contents = self._filesystem.read_text_file(args_gn_file) return bool( re.search(r'^\s*dcheck_always_on\s*=\s*true\s*(#.*)?$', contents, re.MULTILINE)) def driver_stop_timeout(self): """Returns the amount of time in seconds to wait before killing the process in driver.stop().""" # We want to wait for at least 3 seconds, but if we are really slow, we # want to be slow on cleanup as well (for things like ASAN, Valgrind, etc.) return (3.0 * float(self.get_option('time_out_ms', '0')) / self._default_timeout_ms()) def default_batch_size(self): """Returns the default batch size to use for this port.""" if self.get_option('enable_sanitizer'): # ASAN/MSAN/TSAN use more memory than regular content_shell. Their # memory usage may also grow over time, up to a certain point. # Relaunching the driver periodically helps keep it under control. return 40 # The default is infinite batch size. 
return 0 def default_child_processes(self): """Returns the number of child processes to use for this port.""" return self._executive.cpu_count() def default_max_locked_shards(self): """Returns the number of "locked" shards to run in parallel (like the http tests).""" max_locked_shards = int(self.default_child_processes()) // 4 if not max_locked_shards: return 1 return max_locked_shards def baseline_version_dir(self): """Returns the absolute path to the platform-and-version-specific results.""" baseline_search_paths = self.baseline_search_path() return baseline_search_paths[0] def baseline_flag_specific_dir(self): """If --additional-driver-flag is specified, returns the absolute path to the flag-specific platform-independent results. Otherwise returns None.""" flag_specific_path = self._flag_specific_baseline_search_path() return flag_specific_path[-1] if flag_specific_path else None def baseline_search_path(self): return (self.get_option('additional_platform_directory', []) + self._flag_specific_baseline_search_path() + self._compare_baseline() + list(self.default_baseline_search_path())) def default_baseline_search_path(self): """Returns a list of absolute paths to directories to search under for baselines. The directories are searched in order. """ return map(self._absolute_baseline_path, self.FALLBACK_PATHS[self.version()]) @memoized def _compare_baseline(self): factory = PortFactory(self.host) target_port = self.get_option('compare_port') if target_port: return factory.get(target_port).default_baseline_search_path() return [] def _check_file_exists(self, path_to_file, file_description, override_step=None, more_logging=True): """Verifies that the file is present where expected, or logs an error. Args: file_name: The (human friendly) name or description of the file you're looking for (e.g., "HTTP Server"). Used for error logging. override_step: An optional string to be logged if the check fails. more_logging: Whether or not to log the error messages. Returns: True if the file exists, else False. """ if not self._filesystem.exists(path_to_file): if more_logging: _log.error('Unable to find %s', file_description) _log.error(' at %s', path_to_file) if override_step: _log.error(' %s', override_step) _log.error('') return False return True def check_build(self, needs_http, printer): if not self._check_file_exists(self._path_to_driver(), 'test driver'): return exit_codes.UNEXPECTED_ERROR_EXIT_STATUS if not self._check_driver_build_up_to_date( self.get_option('configuration')): return exit_codes.UNEXPECTED_ERROR_EXIT_STATUS if not self._check_file_exists(self._path_to_image_diff(), 'image_diff'): return exit_codes.UNEXPECTED_ERROR_EXIT_STATUS if self._dump_reader and not self._dump_reader.check_is_functional(): return exit_codes.UNEXPECTED_ERROR_EXIT_STATUS if needs_http and not self.check_httpd(): return exit_codes.UNEXPECTED_ERROR_EXIT_STATUS return exit_codes.OK_EXIT_STATUS def check_sys_deps(self): """Checks whether the system is properly configured. Most checks happen during invocation of the driver prior to running tests. This can be overridden to run custom checks. Returns: An exit status code. """ return exit_codes.OK_EXIT_STATUS def check_httpd(self): httpd_path = self.path_to_apache() if httpd_path: try: env = self.setup_environ_for_server() if self._executive.run_command( [httpd_path, '-v'], env=env, return_exit_code=True) != 0: _log.error('httpd seems broken. 
Cannot run http tests.') return False return True except OSError as e: _log.error('httpd launch error: ' + repr(e)) _log.error('No httpd found. Cannot run http tests.') return False def do_text_results_differ(self, expected_text, actual_text): return expected_text != actual_text def do_audio_results_differ(self, expected_audio, actual_audio): return expected_audio != actual_audio def diff_image(self, expected_contents, actual_contents, max_channel_diff=None, max_pixels_diff=None): """Compares two images and returns an (image diff, error string) pair. If an error occurs (like image_diff isn't found, or crashes), we log an error and return True (for a diff). """ # If only one of them exists, return that one. if not actual_contents and not expected_contents: return (None, None) if not actual_contents: return (expected_contents, None) if not expected_contents: return (actual_contents, None) tempdir = self._filesystem.mkdtemp() expected_filename = self._filesystem.join(str(tempdir), 'expected.png') self._filesystem.write_binary_file(expected_filename, expected_contents) actual_filename = self._filesystem.join(str(tempdir), 'actual.png') self._filesystem.write_binary_file(actual_filename, actual_contents) diff_filename = self._filesystem.join(str(tempdir), 'diff.png') executable = self._path_to_image_diff() # Although we are handed 'old', 'new', image_diff wants 'new', 'old'. command = [ executable, '--diff', actual_filename, expected_filename, diff_filename ] # Notifies image_diff to allow a tolerance when calculating the pixel # diff. To account for variances when the tests are ran on an actual # GPU. if self.get_option('fuzzy_diff'): command.append('--fuzzy-diff') # The max_channel_diff and max_pixels_diff arguments are used by WPT # tests for fuzzy reftests. See # https://web-platform-tests.org/writing-tests/reftests.html#fuzzy-matching if max_channel_diff is not None: command.append('--fuzzy-max-channel-diff={}'.format('-'.join( map(str, max_channel_diff)))) if max_pixels_diff is not None: command.append('--fuzzy-max-pixels-diff={}'.format('-'.join( map(str, max_pixels_diff)))) result = None err_str = None try: output = self._executive.run_command(command) # Log the output, to enable user debugging of a diff hidden by fuzzy # expectations. This is useful when tightening fuzzy bounds. if output: _log.debug(output) except ScriptError as error: if error.exit_code == 1: result = self._filesystem.read_binary_file(diff_filename) # Log the output, to enable user debugging of the diff. if error.output: _log.debug(error.output) else: err_str = 'Image diff returned an exit code of %s. See http://crbug.com/278596' % error.exit_code except OSError as error: err_str = 'error running image diff: %s' % error finally: self._filesystem.rmtree(str(tempdir)) return (result, err_str or None) def driver_name(self): if self.get_option('driver_name'): return self.get_option('driver_name') return self.CONTENT_SHELL_NAME def expected_baselines_by_extension(self, test_name): """Returns a dict mapping baseline suffix to relative path for each baseline in a test. For reftests, it returns ".==" or ".!=" instead of the suffix. """ # FIXME: The name similarity between this and expected_baselines() # below, is unfortunate. We should probably rename them both. baseline_dict = {} reference_files = self.reference_files(test_name) if reference_files: # FIXME: How should this handle more than one type of reftest? baseline_dict['.' 
+ reference_files[0][0]] = \ self.relative_test_filename(reference_files[0][1]) for extension in self.BASELINE_EXTENSIONS: path = self.expected_filename( test_name, extension, return_default=False) baseline_dict[extension] = self.relative_test_filename( path) if path else path return baseline_dict def output_filename(self, test_name, suffix, extension): """Generates the output filename for a test. This method gives a proper filename for various outputs of a test, including baselines and actual results. Usually, the output filename follows the pattern: test_name_without_ext+suffix+extension, but when the test name contains query strings, e.g. external/wpt/foo.html?wss, test_name_without_ext is mangled to be external/wpt/foo_wss. It is encouraged to use this method instead of writing another mangling. Args: test_name: The name of a test. suffix: A suffix string to add before the extension (e.g. "-expected"). extension: The extension of the output file (starting with .). Returns: A string, the output filename. """ # WPT names might contain query strings, e.g. external/wpt/foo.html?wss, # in which case we mangle test_name_root (the part of a path before the # last extension point) to external/wpt/foo_wss, and the output filename # becomes external/wpt/foo_wss-expected.txt. index = test_name.find('?') if index != -1: test_name_root, _ = self._filesystem.splitext(test_name[:index]) query_part = test_name[index:] test_name_root += self._filesystem.sanitize_filename(query_part) else: test_name_root, _ = self._filesystem.splitext(test_name) return test_name_root + suffix + extension def expected_baselines(self, test_name, extension, all_baselines=False, match=True): """Given a test name, finds where the baseline results are located. Return values will be in the format appropriate for the current platform (e.g., "\\" for path separators on Windows). If the results file is not found, then None will be returned for the directory, but the expected relative pathname will still be returned. This routine is generic but lives here since it is used in conjunction with the other baseline and filename routines that are platform specific. Args: test_name: Name of test file (usually a relative path under web_tests/) extension: File extension of the expected results, including dot; e.g. '.txt' or '.png'. This should not be None, but may be an empty string. all_baselines: If True, return an ordered list of all baseline paths for the given platform. If False, return only the first one. match: Whether the baseline is a match or a mismatch. Returns: A list of (platform_dir, results_filename) pairs, where platform_dir - abs path to the top of the results tree (or test tree) results_filename - relative path from top of tree to the results file (port.join() of the two gives you the full path to the file, unless None was returned.) """ baseline_filename = self.output_filename( test_name, self.BASELINE_SUFFIX if match else self.BASELINE_MISMATCH_SUFFIX, extension) baseline_search_path = self.baseline_search_path() baselines = [] for platform_dir in baseline_search_path: if self._filesystem.exists( self._filesystem.join(platform_dir, baseline_filename)): baselines.append((platform_dir, baseline_filename)) if not all_baselines and baselines: return baselines # If it wasn't found in a platform directory, return the expected # result in the test directory, even if no such file actually exists. 
platform_dir = self.web_tests_dir() if self._filesystem.exists( self._filesystem.join(platform_dir, baseline_filename)): baselines.append((platform_dir, baseline_filename)) if baselines: return baselines return [(None, baseline_filename)] def expected_filename(self, test_name, extension, return_default=True, fallback_base_for_virtual=True, match=True): """Given a test name, returns an absolute path to its expected results. If no expected results are found in any of the searched directories, the directory in which the test itself is located will be returned. The return value is in the format appropriate for the platform (e.g., "\\" for path separators on windows). This routine is generic but is implemented here to live alongside the other baseline and filename manipulation routines. Args: test_name: Name of test file (usually a relative path under web_tests/) extension: File extension of the expected results, including dot; e.g. '.txt' or '.png'. This should not be None, but may be an empty string. return_default: If True, returns the path to the generic expectation if nothing else is found; if False, returns None. fallback_base_for_virtual: For virtual test only. When no virtual specific baseline is found, if this parameter is True, fallback to find baselines of the base test; if False, depending on |return_default|, returns the generic virtual baseline or None. match: Whether the baseline is a match or a mismatch. Returns: An absolute path to its expected results, or None if not found. """ # The [0] means the first expected baseline (which is the one to be # used) in the fallback paths. platform_dir, baseline_filename = self.expected_baselines( test_name, extension, match=match)[0] if platform_dir: return self._filesystem.join(platform_dir, baseline_filename) if fallback_base_for_virtual: actual_test_name = self.lookup_virtual_test_base(test_name) if actual_test_name: return self.expected_filename( actual_test_name, extension, return_default, match=match) if return_default: return self._filesystem.join(self.web_tests_dir(), baseline_filename) return None def fallback_expected_filename(self, test_name, extension): """Given a test name, returns an absolute path to its next fallback baseline. Args: same as expected_filename() Returns: An absolute path to the next fallback baseline, or None if not found. """ baselines = self.expected_baselines( test_name, extension, all_baselines=True) if len(baselines) < 2: actual_test_name = self.lookup_virtual_test_base(test_name) if actual_test_name: if len(baselines) == 0: return self.fallback_expected_filename( actual_test_name, extension) # In this case, baselines[0] is the current baseline of the # virtual test, so the first base test baseline is the fallback # baseline of the virtual test. return self.expected_filename( actual_test_name, extension, return_default=False) return None platform_dir, baseline_filename = baselines[1] if platform_dir: return self._filesystem.join(platform_dir, baseline_filename) return None def expected_checksum(self, test_name): """Returns the checksum of the image we expect the test to produce, or None if it is a text-only test. 
""" png_path = self.expected_filename(test_name, '.png') if self._filesystem.exists(png_path): with self._filesystem.open_binary_file_for_reading( png_path) as filehandle: return read_checksum_from_png.read_checksum(filehandle) return None def expected_image(self, test_name): """Returns the image we expect the test to produce.""" baseline_path = self.expected_filename(test_name, '.png') if not self._filesystem.exists(baseline_path): return None return self._filesystem.read_binary_file(baseline_path) def expected_audio(self, test_name): baseline_path = self.expected_filename(test_name, '.wav') if not self._filesystem.exists(baseline_path): return None return self._filesystem.read_binary_file(baseline_path) def expected_text(self, test_name): """Returns the text output we expect the test to produce, or None if we don't expect there to be any text output. End-of-line characters are normalized to '\n'. """ # FIXME: DRT output is actually utf-8, but since we don't decode the # output from DRT (instead treating it as a binary string), we read the # baselines as a binary string, too. baseline_path = self.expected_filename(test_name, '.txt') if not self._filesystem.exists(baseline_path): return None text = self._filesystem.read_binary_file(baseline_path) return text.replace(b'\r\n', b'\n') def expected_subtest_failure(self, test_name): baseline = self.expected_text(test_name) if baseline: baseline = baseline.decode('utf8', 'replace') if re.search(r"^(FAIL|NOTRUN|TIMEOUT)", baseline, re.MULTILINE): return True return False def expected_harness_error(self, test_name): baseline = self.expected_text(test_name) if baseline: baseline = baseline.decode('utf8', 'replace') if re.search(r"^Harness Error\.", baseline, re.MULTILINE): return True return False def reference_files(self, test_name): """Returns a list of expectation (== or !=) and filename pairs""" # Try to find -expected.* or -expected-mismatch.* in the same directory. reftest_list = [] for expectation in ('==', '!='): for extension in Port.supported_file_extensions: path = self.expected_filename( test_name, extension, match=(expectation == '==')) if self._filesystem.exists(path): reftest_list.append((expectation, path)) if reftest_list: return reftest_list # Try to extract information from MANIFEST.json. match = self.WPT_REGEX.match(test_name) if not match: return [] wpt_path = match.group(1) path_in_wpt = match.group(2) for expectation, ref_path_in_wpt in self.wpt_manifest( wpt_path).extract_reference_list(path_in_wpt): ref_absolute_path = self._filesystem.join( self.web_tests_dir(), wpt_path + ref_path_in_wpt) reftest_list.append((expectation, ref_absolute_path)) return reftest_list def tests(self, paths=None): """Returns all tests or tests matching supplied paths. Args: paths: Array of paths to match. If supplied, this function will only return tests matching at least one path in paths. Returns: An array of test paths and test names. The latter are web platform tests that don't correspond to file paths but are valid tests, for instance a file path test.any.js could correspond to two test names: test.any.html and test.any.worker.html. """ tests = self.real_tests(paths) if paths: tests.extend(self._virtual_tests_matching_paths(paths)) if (any(wpt_path in path for wpt_path in self.WPT_DIRS for path in paths) # TODO(robertma): Remove this special case when external/wpt is moved to wpt. 
or any('external' in path for path in paths)): tests.extend(self._wpt_test_urls_matching_paths(paths)) else: # '/' is used instead of filesystem.sep as the WPT manifest always # uses '/' for paths (it is not OS dependent). wpt_tests = [ wpt_path + '/' + test for wpt_path in self.WPT_DIRS for test in self.wpt_manifest(wpt_path).all_urls() ] tests_by_dir = defaultdict(list) for test in tests + wpt_tests: dirname = os.path.dirname(test) + '/' tests_by_dir[dirname].append(test) tests.extend(self._all_virtual_tests(tests_by_dir)) tests.extend(wpt_tests) return tests def real_tests_from_dict(self, paths, tests_by_dir): """Find all real tests in paths, using results saved in dict.""" files = [] for path in paths: if self._has_supported_extension_for_all(path): files.append(path) continue path = path + '/' if path[-1] != '/' else path for key, value in tests_by_dir.items(): if key.startswith(path): files.extend(value) return files def real_tests(self, paths): """Find all real tests in paths except WPT.""" # When collecting test cases, skip these directories. skipped_directories = set([ 'platform', 'resources', 'support', 'script-tests', 'reference', 'reftest' ]) # Also ignore all WPT directories. Note that this is only an # optimization; is_non_wpt_test_file should skip WPT regardless. skipped_directories |= set(self.WPT_DIRS) files = find_files.find(self._filesystem, self.web_tests_dir(), paths, skipped_directories, lambda _, dirname, filename: self.is_non_wpt_test_file(dirname, filename), self.test_key) return [self.relative_test_filename(f) for f in files] @staticmethod def is_reference_html_file(filesystem, dirname, filename): # TODO(robertma): We probably do not need prefixes/suffixes other than # -expected{-mismatch} any more. Or worse, there might be actual tests # with these prefixes/suffixes. if filename.startswith('ref-') or filename.startswith('notref-'): return True filename_without_ext, _ = filesystem.splitext(filename) for suffix in ['-expected', '-expected-mismatch', '-ref', '-notref']: if filename_without_ext.endswith(suffix): return True return False # When collecting test cases, we include any file with these extensions. supported_file_extensions = set([ '.html', '.xml', '.xhtml', '.xht', '.pl', '.htm', '.php', '.svg', '.mht', '.pdf', ]) def _has_supported_extension_for_all(self, filename): extension = self._filesystem.splitext(filename)[1] if 'inspector-protocol' in filename and extension == '.js': return True if 'devtools' in filename and extension == '.js': return True return extension in self.supported_file_extensions def _has_supported_extension(self, filename): """Returns True if filename is one of the file extensions we want to run a test on.""" extension = self._filesystem.splitext(filename)[1] return extension in self.supported_file_extensions def is_non_wpt_test_file(self, dirname, filename): # Convert dirname to a relative path to web_tests with slashes # normalized and ensure it has a trailing slash. 
normalized_test_dir = self.relative_test_filename(
            dirname) + self.TEST_PATH_SEPARATOR
        if any(
                normalized_test_dir.startswith(d + self.TEST_PATH_SEPARATOR)
                for d in self.WPT_DIRS):
            return False

        extension = self._filesystem.splitext(filename)[1]
        if 'inspector-protocol' in dirname and extension == '.js':
            return True
        if 'devtools' in dirname and extension == '.js':
            return True
        return (self._has_supported_extension(filename)
                and not Port.is_reference_html_file(self._filesystem, dirname,
                                                    filename))

    @memoized
    def wpt_manifest(self, path):
        assert path in self.WPT_DIRS
        # Convert '/' to the platform-specific separator.
        path = self._filesystem.normpath(path)
        manifest_path = self._filesystem.join(self.web_tests_dir(), path,
                                              MANIFEST_NAME)
        if not self._filesystem.exists(manifest_path) or self.get_option(
                'manifest_update', False):
            _log.debug('Generating MANIFEST.json for %s...', path)
            WPTManifest.ensure_manifest(self, path)
        return WPTManifest(self.host, manifest_path)

    def is_wpt_crash_test(self, test_name):
        """Returns whether a WPT test is a crashtest.

        See https://web-platform-tests.org/writing-tests/crashtest.html.
        """
        match = self.WPT_REGEX.match(test_name)
        if not match:
            return False
        wpt_path = match.group(1)
        path_in_wpt = match.group(2)
        return self.wpt_manifest(wpt_path).is_crash_test(path_in_wpt)

    def is_slow_wpt_test(self, test_name):
        # When DCHECK is enabled, idlharness tests run 5-6x slower due to the
        # amount of JavaScript they use (most web_tests run very little JS).
        # This causes flaky timeouts for a lot of them, as a 0.5-1s test
        # becomes close to the default 6s timeout.
        if (self.is_wpt_idlharness_test(test_name)
                and self._build_has_dcheck_always_on()):
            return True
        match = self.WPT_REGEX.match(test_name)
        if not match:
            return False
        wpt_path = match.group(1)
        path_in_wpt = match.group(2)
        return self.wpt_manifest(wpt_path).is_slow_test(path_in_wpt)

    def get_wpt_fuzzy_metadata(self, test_name):
        """Returns the fuzzy metadata for the given WPT test.

        The metadata is a pair of lists, (maxDifference, totalPixels), where
        each list is a [min, max] range, inclusive. If the test is not a WPT
        test or has no fuzzy metadata, returns (None, None).

        See https://web-platform-tests.org/writing-tests/reftests.html#fuzzy-matching
        """
        match = self.WPT_REGEX.match(test_name)
        if not match:
            return None, None
        wpt_path = match.group(1)
        path_in_wpt = match.group(2)
        return self.wpt_manifest(wpt_path).extract_fuzzy_metadata(path_in_wpt)

    def get_file_path_for_wpt_test(self, test_name):
        """Returns the real file path for the given WPT test.

        Or None if the test is not a WPT test.
        """
        match = self.WPT_REGEX.match(test_name)
        if not match:
            return None
        wpt_path = match.group(1)
        path_in_wpt = match.group(2)
        file_path_in_wpt = self.wpt_manifest(wpt_path).file_path_for_test_url(
            path_in_wpt)
        if not file_path_in_wpt:
            return None
        return self._filesystem.join(wpt_path, file_path_in_wpt)

    def test_key(self, test_name):
        """Turns a test name into a pair of sublists: the natural sort key of
        the dirname, and the natural sort key of the basename.

        This can be used when sorting paths so that files in a directory are
        kept together rather than being mixed in with files in
        subdirectories.
        """
        dirname, basename = self.split_test(test_name)
        return (self._natural_sort_key(dirname + self.TEST_PATH_SEPARATOR),
                self._natural_sort_key(basename))

    def _natural_sort_key(self, string_to_split):
        """Turns a string into a list of string and number chunks.

        For example: "z23a" -> ["z", 23, "a"]

        This can be used to implement "natural sort" order.
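        With this key, "test9.html" sorts before "test10.html", since the
        numeric chunks 9 and 10 compare as integers rather than as strings.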
See: http://www.codinghorror.com/blog/2007/12/sorting-for-humans-natural-sort-order.html http://nedbatchelder.com/blog/200712.html#e20071211T054956 """ def tryint(val): try: return int(val) except ValueError: return val return [tryint(chunk) for chunk in re.split(r'(\d+)', string_to_split)] def test_dirs(self): """Returns the list of top-level test directories.""" web_tests_dir = self.web_tests_dir() fs = self._filesystem return [ d for d in fs.listdir(web_tests_dir) if fs.isdir(fs.join(web_tests_dir, d)) ] @memoized def test_isfile(self, test_name): """Returns True if the test name refers to an existing test file.""" # Used by test_expectations.py to apply rules to a file. if self._filesystem.isfile(self.abspath_for_test(test_name)): return True base = self.lookup_virtual_test_base(test_name) return base and self._filesystem.isfile(self.abspath_for_test(base)) @memoized def test_isdir(self, test_name): """Returns True if the test name refers to an existing directory of tests.""" # Used by test_expectations.py to apply rules to whole directories. if self._filesystem.isdir(self.abspath_for_test(test_name)): return True base = self.lookup_virtual_test_base(test_name) return base and self._filesystem.isdir(self.abspath_for_test(base)) @memoized def test_exists(self, test_name): """Returns True if the test name refers to an existing test directory or file.""" # Used by lint_test_expectations.py to determine if an entry refers to a # valid test. if self.is_wpt_test(test_name): # A virtual WPT test must have valid virtual prefix and base. if test_name.startswith('virtual/'): return bool(self.lookup_virtual_test_base(test_name)) # Otherwise treat any WPT test as existing regardless of their real # existence on the file system. # TODO(crbug.com/959958): Actually check existence of WPT tests. return True return self.test_isfile(test_name) or self.test_isdir(test_name) def split_test(self, test_name): """Splits a test name into the 'directory' part and the 'basename' part.""" index = test_name.rfind(self.TEST_PATH_SEPARATOR) if index < 1: return ('', test_name) return (test_name[0:index], test_name[index:]) def normalize_test_name(self, test_name): """Returns a normalized version of the test name or test directory.""" if test_name.endswith('/'): return test_name if self.test_isdir(test_name): return test_name + '/' return test_name def driver_cmd_line(self): """Prints the DRT (DumpRenderTree) command that will be used.""" return self.create_driver(0).cmd_line([]) def update_baseline(self, baseline_path, data): """Updates the baseline for a test. Args: baseline_path: the actual path to use for baseline, not the path to the test. This function is used to update either generic or platform-specific baselines, but we can't infer which here. data: contents of the baseline. """ self._filesystem.write_binary_file(baseline_path, data) def _path_from_chromium_base(self, *comps): return self._path_finder.path_from_chromium_base(*comps) def _perf_tests_dir(self): return self._path_finder.perf_tests_dir() def web_tests_dir(self): custom_web_tests_dir = self.get_option('layout_tests_directory') if custom_web_tests_dir: return self._filesystem.abspath(custom_web_tests_dir) return self._path_finder.web_tests_dir() def skips_test(self, test): """Checks whether the given test is skipped for this port. Returns True if the test is skipped because the port runs smoke tests only or because the test is marked as Skip in NeverFixTest (otherwise the test is only marked as Skip indicating a temporary skip). 
""" return self.skipped_due_to_smoke_tests( test) or self.skipped_in_never_fix_tests(test) @memoized def _tests_from_file(self, filename): tests = set() file_contents = self._filesystem.read_text_file(filename) for line in file_contents.splitlines(): line = line.strip() if line.startswith('#') or not line: continue tests.add(line) return tests def skipped_due_to_smoke_tests(self, test): """Checks if the test is skipped based on the set of Smoke tests. Returns True if this port runs only smoke tests, and the test is not in the smoke tests file; returns False otherwise. """ if not self.default_smoke_test_only(): return False smoke_test_filename = self.path_to_smoke_tests_file() if not self._filesystem.exists(smoke_test_filename): return False smoke_tests = self._tests_from_file(smoke_test_filename) return test not in smoke_tests def path_to_smoke_tests_file(self): return self._filesystem.join(self.web_tests_dir(), 'SmokeTests') def skipped_in_never_fix_tests(self, test): """Checks if the test is marked as Skip in NeverFixTests for this port. Skip in NeverFixTests indicate we will never fix the failure and permanently skip the test. Only Skip lines are allowed in NeverFixTests. Some lines in NeverFixTests are platform-specific. Note: this will not work with skipped directories. See also the same issue with update_all_test_expectations_files in test_importer.py. """ # Note: The parsing logic here (reading the file, constructing a # parser, etc.) is very similar to blinkpy/w3c/test_copier.py. path = self.path_to_never_fix_tests_file() contents = self._filesystem.read_text_file(path) test_expectations = TestExpectations(tags=self.get_platform_tags()) test_expectations.parse_tagged_list(contents) return ResultType.Skip in test_expectations.expectations_for( test).results def path_to_never_fix_tests_file(self): return self._filesystem.join(self.web_tests_dir(), 'NeverFixTests') def name(self): """Returns a name that uniquely identifies this particular type of port. This is the full port name including both base port name and version, and can be passed to PortFactory.get() to instantiate a port. """ return self._name def operating_system(self): raise NotImplementedError def version(self): """Returns a string indicating the version of a given platform For example, "win10" or "trusty". This is used to help identify the exact port when parsing test expectations, determining search paths, and logging information. """ return self._version def architecture(self): return self._architecture def python3_command(self): """Returns the correct command to use to run python3. This exists because Windows has inconsistent behavior between the bots and local developer machines, such that determining which python3 name to use is non-trivial. See https://crbug.com/1155616. Once blinkpy runs under python3, this can be removed in favour of callers using sys.executable. """ if six.PY3: # Prefer sys.executable when the current script runs under python3. # The current script might be running with vpython3 and in that case # using the same executable will share the same virtualenv. return sys.executable return 'python3' def get_option(self, name, default_value=None): return getattr(self._options, name, default_value) def set_option_default(self, name, default_value): return self._options.ensure_value(name, default_value) def relative_test_filename(self, filename):<|fim▁hole|> """Returns a Unix-style path for a filename relative to web_tests. Ports may legitimately return absolute paths here if no relative path makes sense. 
""" # Ports that run on windows need to override this method to deal with # filenames with backslashes in them. if filename.startswith(self.web_tests_dir()): return self.host.filesystem.relpath(filename, self.web_tests_dir()) else: return self.host.filesystem.abspath(filename) @memoized def abspath_for_test(self, test_name): """Returns the full path to the file for a given test name. This is the inverse of relative_test_filename(). """ return self._filesystem.join(self.web_tests_dir(), test_name) @memoized def args_for_test(self, test_name): args = self._lookup_virtual_test_args(test_name) tracing_categories = self.get_option('enable_tracing') if tracing_categories: args.append('--trace-startup=' + tracing_categories) # Do not finish the trace until the test is finished. args.append('--trace-startup-duration=0') # Append the current time to the output file name to ensure that # the subsequent repetitions of the test do not overwrite older # trace files. current_time = time.strftime("%Y-%m-%d-%H-%M-%S") file_name = 'trace_layout_test_{}_{}.json'.format( self._filesystem.sanitize_filename(test_name), current_time) args.append('--trace-startup-file=' + file_name) return args @memoized def name_for_test(self, test_name): test_base = self.lookup_virtual_test_base(test_name) if test_base and not self._filesystem.exists( self.abspath_for_test(test_name)): return test_base return test_name def bot_test_times_path(self): # TODO(crbug.com/1030434): For the not_site_per_process_blink_web_tests step on linux, # an exception is raised when merging the bot times json files. This happens whenever they # are outputted into the results directory. Temporarily we will return the bot times json # file relative to the target directory. return self._build_path('webkit_test_times', 'bot_times_ms.json') def results_directory(self): """Returns the absolute path directory which will store all web tests outputted files. It may include a sub directory for artifacts and it may store performance test results.""" if not self._results_directory: option_val = self.get_option( 'results_directory') or self.default_results_directory() assert not self._filesystem.basename(option_val) == 'layout-test-results', ( 'crbug.com/1026494, crbug.com/1027708: The layout-test-results sub directory should ' 'not be passed as part of the --results-directory command line argument.') self._results_directory = self._filesystem.abspath(option_val) return self._results_directory def artifacts_directory(self): """Returns path to artifacts sub directory of the results directory. This directory will store test artifacts, which may include actual and expected output from web tests.""" return self._filesystem.join(self.results_directory(), ARTIFACTS_SUB_DIR) def perf_results_directory(self): return self.results_directory() def inspector_build_directory(self): return self._build_path('gen', 'third_party', 'devtools-frontend', 'src', 'front_end') def generated_sources_directory(self): return self._build_path('gen') def apache_config_directory(self): return self._path_finder.path_from_blink_tools('apache_config') def default_results_directory(self): """Returns the absolute path to the build directory.""" return self._build_path() def setup_test_run(self): """Performs port-specific work at the beginning of a test run.""" # Delete the disk cache if any to ensure a clean test run. 
dump_render_tree_binary_path = self._path_to_driver() cachedir = self._filesystem.dirname(dump_render_tree_binary_path) cachedir = self._filesystem.join(cachedir, 'cache') if self._filesystem.exists(cachedir): self._filesystem.rmtree(cachedir) if self._dump_reader: self._filesystem.maybe_make_directory( self._dump_reader.crash_dumps_directory()) def num_workers(self, requested_num_workers): """Returns the number of available workers (possibly less than the number requested).""" return requested_num_workers def clean_up_test_run(self): """Performs port-specific work at the end of a test run.""" if self._image_differ: self._image_differ.stop() self._image_differ = None def setup_environ_for_server(self): # We intentionally copy only a subset of the environment when # launching subprocesses to ensure consistent test results. clean_env = {} variables_to_copy = [ 'CHROME_DEVEL_SANDBOX', 'CHROME_IPC_LOGGING', 'ASAN_OPTIONS', 'TSAN_OPTIONS', 'MSAN_OPTIONS', 'LSAN_OPTIONS', 'UBSAN_OPTIONS', 'VALGRIND_LIB', 'VALGRIND_LIB_INNER', 'TMPDIR', ] if 'TMPDIR' not in self.host.environ: self.host.environ['TMPDIR'] = tempfile.gettempdir() # CGIs are run directory-relative so they need an absolute TMPDIR self.host.environ['TMPDIR'] = self._filesystem.abspath( self.host.environ['TMPDIR']) if self.host.platform.is_linux() or self.host.platform.is_freebsd(): variables_to_copy += [ 'XAUTHORITY', 'HOME', 'LANG', 'LD_LIBRARY_PATH', 'DBUS_SESSION_BUS_ADDRESS', 'XDG_DATA_DIRS', 'XDG_RUNTIME_DIR' ] clean_env['DISPLAY'] = self.host.environ.get('DISPLAY', ':1') if self.host.platform.is_mac(): clean_env['DYLD_LIBRARY_PATH'] = self._build_path() variables_to_copy += [ 'HOME', ] if self.host.platform.is_win(): variables_to_copy += [ 'PATH', ] for variable in variables_to_copy: if variable in self.host.environ: clean_env[variable] = self.host.environ[variable] for string_variable in self.get_option('additional_env_var', []): [name, value] = string_variable.split('=', 1) clean_env[name] = value return clean_env def show_results_html_file(self, results_filename): """Displays the given HTML file in a user's browser.""" return self.host.user.open_url( abspath_to_uri(self.host.platform, results_filename)) def create_driver(self, worker_number, no_timeout=False): """Returns a newly created Driver subclass for starting/stopping the test driver. """ return self._driver_class()(self, worker_number, no_timeout=no_timeout) def requires_http_server(self): # Does the port require an HTTP server for running tests? This could # be the case when the tests aren't run on the host platform. return False def start_http_server(self, additional_dirs, number_of_drivers, output_dir=''): """Start a web server. Raise an error if it can't start or is already running. Ports can stub this out if they don't need a web server to be running. """ assert not self._http_server, 'Already running an http server.' output_dir = output_dir or self.artifacts_directory() server = apache_http.ApacheHTTP( self, output_dir, additional_dirs=additional_dirs, number_of_servers=(number_of_drivers * 4)) server.start() self._http_server = server def start_websocket_server(self, output_dir=''): """Start a web server. Raise an error if it can't start or is already running. Ports can stub this out if they don't need a websocket server to be running. """ assert not self._websocket_server, 'Already running a websocket server.' 
output_dir = output_dir or self.artifacts_directory() server = pywebsocket.PyWebSocket( self, output_dir, python_executable=self._options.python_executable) server.start() self._websocket_server = server @staticmethod def is_wpt_test(test): """Whether a test is considered a web-platform-tests test.""" return Port.WPT_REGEX.match(test) @staticmethod def is_wpt_idlharness_test(test_file): """Returns whether a WPT test is (probably) an idlharness test. There are no rules in WPT that can be used to identify idlharness tests without examining the file contents (which would be expensive). This method utilizes a filename heuristic, based on the convention of including 'idlharness' in the appropriate test names. """ match = Port.WPT_REGEX.match(test_file) if not match: return False filename = match.group(2).split('/')[-1] return 'idlharness' in filename @staticmethod def should_use_wptserve(test): return Port.is_wpt_test(test) def start_wptserve(self, output_dir=''): """Starts a WPT web server. Raises an error if it can't start or is already running. """ assert not self._wpt_server, 'Already running a WPT server.' output_dir = output_dir or self.artifacts_directory() # We currently don't support any output mechanism for the WPT server. server = wptserve.WPTServe(self, output_dir) server.start() self._wpt_server = server def stop_wptserve(self): """Shuts down the WPT server if it is running.""" if self._wpt_server: self._wpt_server.stop() self._wpt_server = None def http_server_requires_http_protocol_options_unsafe(self): httpd_path = self.path_to_apache() intentional_syntax_error = 'INTENTIONAL_SYNTAX_ERROR' # yapf: disable cmd = [ httpd_path, '-t', '-f', self.path_to_apache_config_file(), '-C', 'ServerRoot "%s"' % self.apache_server_root(), '-C', 'HttpProtocolOptions Unsafe', '-C', intentional_syntax_error ] # yapf: enable env = self.setup_environ_for_server() def error_handler(err): pass output = self._executive.run_command( cmd, env=env, error_handler=error_handler) # If apache complains about the intentional error, it apparently # accepted the HttpProtocolOptions directive, and we should add it. return intentional_syntax_error in output def http_server_supports_ipv6(self): # Apache < 2.4 on win32 does not support IPv6. return not self.host.platform.is_win() def stop_http_server(self): """Shuts down the http server if it is running.""" if self._http_server: self._http_server.stop() self._http_server = None def stop_websocket_server(self): """Shuts down the websocket server if it is running.""" if self._websocket_server: self._websocket_server.stop() self._websocket_server = None # # TEST EXPECTATION-RELATED METHODS # def test_configuration(self): """Returns the current TestConfiguration for the port.""" if not self._test_configuration: self._test_configuration = TestConfiguration( self._version, self._architecture, self._options.configuration.lower()) return self._test_configuration # FIXME: Belongs on a Platform object. @memoized def all_test_configurations(self): """Returns a list of TestConfiguration instances, representing all available test configurations for this port. """ return self._generate_all_test_configurations() # FIXME: Belongs on a Platform object. def configuration_specifier_macros(self): """Ports may provide a way to abbreviate configuration specifiers to conveniently refer to them as one term or alias specific values to more generic ones. For example: (vista, win7) -> win # Abbreviate all Windows versions into one namesake. 
(precise, trusty) -> linux # Change specific name of Linux distro to a more generic term. Returns a dictionary, each key representing a macro term ('win', for example), and value being a list of valid configuration specifiers (such as ['vista', 'win7']). """ return self.CONFIGURATION_SPECIFIER_MACROS def _generate_all_test_configurations(self): """Returns a sequence of the TestConfigurations the port supports.""" # By default, we assume we want to test every graphics type in # every configuration on every system. test_configurations = [] for version, architecture in self.ALL_SYSTEMS: for build_type in self.ALL_BUILD_TYPES: test_configurations.append( TestConfiguration(version, architecture, build_type)) return test_configurations def _flag_specific_expectations_path(self): config_name = self.flag_specific_config_name() if config_name: return self.path_to_flag_specific_expectations_file(config_name) def _flag_specific_baseline_search_path(self): config_name = self.flag_specific_config_name() if not config_name: return [] flag_dir = self._filesystem.join(self.web_tests_dir(), 'flag-specific', config_name) platform_dirs = [ self._filesystem.join(flag_dir, 'platform', platform_dir) for platform_dir in self.FALLBACK_PATHS[self.version()] ] return platform_dirs + [flag_dir] def expectations_dict(self): """Returns an OrderedDict of name -> expectations strings. The names are expected to be (but not required to be) paths in the filesystem. If the name is a path, the file can be considered updatable for things like rebaselining, so don't use names that are paths if they're not paths. Generally speaking the ordering should be files in the filesystem in cascade order (TestExpectations followed by Skipped, if the port honors both formats), then any built-in expectations (e.g., from compile-time exclusions), then --additional-expectations options. """ # FIXME: rename this to test_expectations() once all the callers are # updated to know about the ordered dict. expectations = collections.OrderedDict() default_expectations_files = set(self.default_expectations_files()) ignore_default = self.get_option('ignore_default_expectations', False) for path in self.used_expectations_files(): is_default = path in default_expectations_files if ignore_default and is_default: continue path_exists = self._filesystem.exists(path) if is_default: if path_exists: expectations[path] = self._filesystem.read_text_file(path) else: if path_exists: _log.debug( "reading additional_expectations from path '%s'", path) expectations[path] = self._filesystem.read_text_file(path) else: # TODO(rmhasan): Fix additional expectation paths for # not_site_per_process_blink_web_tests, then change this # back to raising exceptions for incorrect expectation # paths. 
                    _log.warning(
                        "additional_expectations path '%s' does not exist",
                        path)
        return expectations

    def all_expectations_dict(self):
        """Returns an OrderedDict of name -> expectations strings."""
        expectations = self.expectations_dict()

        flag_path = self._filesystem.join(self.web_tests_dir(),
                                          self.FLAG_EXPECTATIONS_PREFIX)
        if not self._filesystem.exists(flag_path):
            return expectations

        for (_, _, filenames) in self._filesystem.walk(flag_path):
            if 'README.txt' in filenames:
                filenames.remove('README.txt')
            if 'PRESUBMIT.py' in filenames:
                filenames.remove('PRESUBMIT.py')
            for filename in filenames:
                path = self._filesystem.join(flag_path, filename)
                try:
                    expectations[path] = self._filesystem.read_text_file(path)
                except UnicodeDecodeError:
                    _log.error('Failed to read expectations file: \'%s\'',
                               path)
                    raise

        return expectations

    def bot_expectations(self):
        if not self.get_option('ignore_flaky_tests'):
            return {}

        full_port_name = self.determine_full_port_name(
            self.host, self._options, self.port_name)
        builder_category = self.get_option('ignore_builder_category', 'layout')
        factory = BotTestExpectationsFactory(self.host.builders)
        # FIXME: This only grabs the release builder's flakiness data. If
        # we're running debug, then we should grab the debug builder's data.
        expectations = factory.expectations_for_port(full_port_name,
                                                     builder_category)

        if not expectations:
            return {}

        ignore_mode = self.get_option('ignore_flaky_tests')
        if ignore_mode == 'very-flaky' or ignore_mode == 'maybe-flaky':
            return expectations.flakes_by_path(ignore_mode == 'very-flaky')
        if ignore_mode == 'unexpected':
            return expectations.unexpected_results_by_path()
        _log.warning("Unexpected ignore mode: '%s'.", ignore_mode)
        return {}

    def default_expectations_files(self):
        """Returns a list of paths to expectations files that apply by default.

        There are other "test expectations" files that may be applied if
        the --additional-expectations flag is passed; those aren't included
        here.
        """
        return filter(None, [
            self.path_to_generic_test_expectations_file(),
            self.path_to_webdriver_expectations_file(),
            self._filesystem.join(self.web_tests_dir(), 'NeverFixTests'),
            self._filesystem.join(self.web_tests_dir(),
                                  'StaleTestExpectations'),
            self._filesystem.join(self.web_tests_dir(), 'SlowTests')
        ])

    def used_expectations_files(self):
        """Returns a list of paths to expectation files that are used."""
        if self._used_expectation_files is None:
            self._used_expectation_files = list(
                self.default_expectations_files())
            flag_specific = self._flag_specific_expectations_path()
            if flag_specific:
                self._used_expectation_files.append(flag_specific)
            for path in self.get_option('additional_expectations', []):
                expanded_path = self._filesystem.expanduser(path)
                abs_path = self._filesystem.abspath(expanded_path)
                self._used_expectation_files.append(abs_path)
        return self._used_expectation_files

    def extra_expectations_files(self):
        """Returns a list of paths to test expectations not loaded by default.

        These paths are passed via --additional-expectations on some builders.
""" return [ self._filesystem.join(self.web_tests_dir(), 'ASANExpectations'), self._filesystem.join(self.web_tests_dir(), 'LeakExpectations'), self._filesystem.join(self.web_tests_dir(), 'MSANExpectations'), ] @memoized def path_to_generic_test_expectations_file(self): return self._filesystem.join(self.web_tests_dir(), 'TestExpectations') @memoized def path_to_webdriver_expectations_file(self): return self._filesystem.join(self.web_tests_dir(), 'WebDriverExpectations') def path_to_flag_specific_expectations_file(self, flag_specific): return self._filesystem.join(self.web_tests_dir(), self.FLAG_EXPECTATIONS_PREFIX, flag_specific) def repository_path(self): """Returns the repository path for the chromium code base.""" return self._path_from_chromium_base('build') def default_configuration(self): return 'Release' def clobber_old_port_specific_results(self): pass # FIXME: This does not belong on the port object. @memoized def path_to_apache(self): """Returns the full path to the apache binary. This is needed only by ports that use the apache_http_server module. """ raise NotImplementedError('Port.path_to_apache') def apache_server_root(self): """Returns the root that the apache binary is installed to. This is used for the ServerRoot directive. """ executable = self.path_to_apache() return self._filesystem.dirname(self._filesystem.dirname(executable)) def path_to_apache_config_file(self): """Returns the full path to the apache configuration file. If the WEBKIT_HTTP_SERVER_CONF_PATH environment variable is set, its contents will be used instead. This is needed only by ports that use the apache_http_server module. """ config_file_from_env = self.host.environ.get( 'WEBKIT_HTTP_SERVER_CONF_PATH') if config_file_from_env: if not self._filesystem.exists(config_file_from_env): raise IOError( '%s was not found on the system' % config_file_from_env) return config_file_from_env config_file_name = self._apache_config_file_name_for_platform() return self._filesystem.join(self.apache_config_directory(), config_file_name) def _apache_version(self): config = self._executive.run_command([self.path_to_apache(), '-v']) # Log version including patch level. _log.debug( 'Found apache version %s', re.sub( r'(?:.|\n)*Server version: Apache/(\d+\.\d+(?:\.\d+)?)(?:.|\n)*', r'\1', config)) return re.sub(r'(?:.|\n)*Server version: Apache/(\d+\.\d+)(?:.|\n)*', r'\1', config) def _apache_config_file_name_for_platform(self): if self.host.platform.is_linux(): distribution = self.host.platform.linux_distribution() custom_configurations = ['arch', 'debian', 'fedora', 'redhat'] if distribution in custom_configurations: return '%s-httpd-%s.conf' % (distribution, self._apache_version()) return 'apache2-httpd-' + self._apache_version() + '.conf' def _path_to_driver(self, target=None): """Returns the full path to the test driver.""" return self._build_path(target, self.driver_name()) def _path_to_image_diff(self): """Returns the full path to the image_diff binary, or None if it is not available. This is likely used only by diff_image() """ return self._build_path('image_diff') def _absolute_baseline_path(self, platform_dir): """Return the absolute path to the top of the baseline tree for a given platform directory. 
""" return self._filesystem.join(self.web_tests_dir(), 'platform', platform_dir) def _driver_class(self): """Returns the port's driver implementation.""" return driver.Driver def output_contains_sanitizer_messages(self, output): if not output: return None if (b'AddressSanitizer' in output) or (b'MemorySanitizer' in output): return True return False def _get_crash_log(self, name, pid, stdout, stderr, newer_than): if self.output_contains_sanitizer_messages(stderr): # Running the symbolizer script can take a lot of memory, so we need to # serialize access to it across all the concurrently running drivers. llvm_symbolizer_path = self._path_from_chromium_base( 'third_party', 'llvm-build', 'Release+Asserts', 'bin', 'llvm-symbolizer') if self._filesystem.exists(llvm_symbolizer_path): env = self.host.environ.copy() env['LLVM_SYMBOLIZER_PATH'] = llvm_symbolizer_path else: env = None sanitizer_filter_path = self._path_from_chromium_base( 'tools', 'valgrind', 'asan', 'asan_symbolize.py') sanitizer_strip_path_prefix = 'Release/../../' if self._filesystem.exists(sanitizer_filter_path): stderr = self._executive.run_command([ 'flock', sys.executable, sanitizer_filter_path, sanitizer_strip_path_prefix ], input=stderr, decode_output=False, env=env) name_str = name or '<unknown process name>' pid_str = str(pid or '<unknown>') # We require stdout and stderr to be bytestrings, not character strings. if stdout: stdout_lines = stdout.decode('utf8', 'replace').splitlines() else: stdout_lines = [u'<empty>'] if stderr: stderr_lines = stderr.decode('utf8', 'replace').splitlines() else: stderr_lines = [u'<empty>'] return (stderr, ('crash log for %s (pid %s):\n%s\n%s\n' % (name_str, pid_str, '\n'.join( ('STDOUT: ' + l) for l in stdout_lines), '\n'.join( ('STDERR: ' + l) for l in stderr_lines))).encode('utf8', 'replace'), self._get_crash_site(stderr_lines)) def _get_crash_site(self, stderr_lines): # [blah:blah:blah:FATAL: prefix_re = r'\[[\w:/.]*FATAL:' # crash_file.ext(line) site_re = r'(?P<site>[\w_]*\.[\w_]*\(\d*\))' # ] blah failed suffix_re = r'\]\s*(Check failed|Security DCHECK failed)' pattern = re.compile(prefix_re + site_re + suffix_re) for line in stderr_lines: match = pattern.search(line) if match: return match.group('site') return None def look_for_new_crash_logs(self, crashed_processes, start_time): pass def look_for_new_samples(self, unresponsive_processes, start_time): pass def sample_process(self, name, pid): pass def virtual_test_suites(self): if self._virtual_test_suites is None: path_to_virtual_test_suites = self._filesystem.join( self.web_tests_dir(), 'VirtualTestSuites') assert self._filesystem.exists(path_to_virtual_test_suites), \ path_to_virtual_test_suites + ' not found' try: test_suite_json = json.loads( self._filesystem.read_text_file( path_to_virtual_test_suites)) self._virtual_test_suites = [] for json_config in test_suite_json: vts = VirtualTestSuite(**json_config) if any(vts.full_prefix == s.full_prefix for s in self._virtual_test_suites): raise ValueError( '{} contains entries with the same prefix: {!r}. 
Please combine them'
                        .format(path_to_virtual_test_suites, json_config))
                    self._virtual_test_suites.append(vts)
            except ValueError as error:
                raise ValueError('{} is not a valid JSON file: {}'.format(
                    path_to_virtual_test_suites, error))
        return self._virtual_test_suites

    def _all_virtual_tests(self, tests_by_dir):
        tests = []
        for suite in self.virtual_test_suites():
            if suite.bases:
                tests.extend(
                    map(lambda x: suite.full_prefix + x,
                        self.real_tests_from_dict(suite.bases,
                                                  tests_by_dir)))
        return tests

    def _get_bases_for_suite_with_paths(self, suite, paths):
        """Returns a set of bases of the virtual suite that are referenced by
        paths.

        E.g. given a virtual test suite `foo` with the following bases:
            bar/baz
            bar/quu
            qux
        and given paths of [virtual/foo/bar], this method would return
        [bar/baz, bar/quu]

        Given paths of [virtual/foo/bar/baz/test.html], the return would be
        [bar/baz]
        """
        real_paths = [
            p.replace(suite.full_prefix, '', 1) for p in paths
            if p.startswith(suite.full_prefix)
        ]
        # Test for paths that are under the suite's bases, so that we don't
        # run a non-existent test.
        bases = set()
        for real_path in real_paths:
            for base in suite.bases:
                if real_path.startswith(base) or base.startswith(real_path):
                    bases.add(base)
        return list(bases)

    def _virtual_tests_for_suite_with_paths(self, suite, paths):
        if not suite.bases:
            return []
        bases = self._get_bases_for_suite_with_paths(suite, paths)
        if not bases:
            return []
        tests = []
        tests.extend(
            map(lambda x: suite.full_prefix + x, self.real_tests(bases)))
        wpt_bases = []
        for base in bases:
            if any(base.startswith(wpt_dir) for wpt_dir in self.WPT_DIRS):
                wpt_bases.append(base)
        if wpt_bases:
            tests.extend(
                self._wpt_test_urls_matching_paths(
                    wpt_bases, [suite.full_prefix] * len(wpt_bases)))
        return tests

    def _virtual_tests_matching_paths(self, paths):
        tests = []
        normalized_paths = [self.normalize_test_name(p) for p in paths]
        for suite in self.virtual_test_suites():
            virtual_paths = [
                p for p in normalized_paths
                if p.startswith(suite.full_prefix)
            ]
            if not virtual_paths:
                continue
            for test in self._virtual_tests_for_suite_with_paths(
                    suite, virtual_paths):
                if any(test.startswith(p) for p in normalized_paths):
                    tests.append(test)
        if any(self._path_has_wildcard(path) for path in paths):
            _log.warning(
                'WARNING: Wildcards in paths are not supported for virtual test suites.'
            )
        return tests

    def _path_has_wildcard(self, path):
        return '*' in path

    def _wpt_test_urls_matching_paths(self, filter_paths, virtual_prefixes=[]):
        """Returns a set of paths that are tests to be run from the
        web-platform-test manifest files.

        filter_paths: A list of strings that are prefix matched against the
            list of tests in the WPT manifests. Only tests that match are
            returned.
        virtual_prefixes: A list of prefixes corresponding to paths in
            |filter_paths|. If present, each test path output should have its
            virtual prefix prepended to the resulting path to the test.
        """
        # Generate the manifest files if needed and then read them. Do this
        # once for this whole method as the file is large and
        # generation/loading is slow.
        wpts = [(wpt_path, self.wpt_manifest(wpt_path))
                for wpt_path in self.WPT_DIRS]

        tests = []
        # This walks through the set of paths where we should look for tests.
        # For each path, a map can be provided that we replace 'path' with in
        # the result.
        for filter_path, virtual_prefix in zip_longest(filter_paths,
                                                       virtual_prefixes):
            # This is to make sure "external[\\/]?" can also match to
            # external/wpt.
            # TODO(robertma): Remove this special case when external/wpt is
            # moved to wpt.
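            # For example, a filter of 'external' (or 'external/') is widened
            # to 'external/wpt' so it can match tests in the WPT manifest.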
if filter_path.rstrip('\\/').endswith('external'): filter_path = self._filesystem.join(filter_path, 'wpt') # '/' is used throughout this function instead of filesystem.sep as # the WPT manifest always uses '/' for paths (it is not OS # dependent). if self._filesystem.sep != '/': filter_path = filter_path.replace(self._filesystem.sep, '/') # Drop empty path components. filter_path = filter_path.replace('//', '/') # We now have in |filter_path| a path to an actual test directory or file # on disk, in unix format, relative to the root of the web_tests # directory. for wpt_path, wpt_manifest in wpts: # If the |filter_path| is not inside a WPT dir, then we will # match no tests in the manifest. if not filter_path.startswith(wpt_path): continue # Drop the WPT prefix (including the joining '/') from |path|. filter_path_from_wpt = filter_path[len(wpt_path) + 1:] # An empty filter matches everything. if filter_path_from_wpt: # If the filter is to a specific test file that ends with .js, # we match that against tests with any extension by dropping # the extension from the filter. # # Else, when matching a directory, ensure the filter ends in '/' # to only match the exact directory name and not directories # with the filter as a prefix. if wpt_manifest.is_test_file(filter_path_from_wpt): filter_path_from_wpt = re.sub(r'\.js$', '.', filter_path_from_wpt) elif not wpt_manifest.is_test_url(filter_path_from_wpt): filter_path_from_wpt = filter_path_from_wpt.rstrip( '/') + '/' # We now have a path to an actual test directory or file on # disk, in unix format, relative to the WPT directory. # # Look for all tests in the manifest that are under the relative # |filter_path_from_wpt|. for test_path_from_wpt in wpt_manifest.all_urls(): assert not test_path_from_wpt.startswith('/') assert not test_path_from_wpt.endswith('/') # Drop empty path components. test_path_from_wpt = test_path_from_wpt.replace('//', '/') if test_path_from_wpt.startswith(filter_path_from_wpt): # The result is a test path from the root web test # directory. If a |virtual_prefix| was given, we prepend # that to the result. 
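                        # Illustrative values: a virtual_prefix of
                        # 'virtual/foo/' with wpt_path 'external/wpt' and a
                        # manifest URL 'dom/test.html' yields
                        # 'virtual/foo/external/wpt/dom/test.html'.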
prefix = virtual_prefix if virtual_prefix else '' tests.append(prefix + wpt_path + '/' + test_path_from_wpt) return tests def _lookup_virtual_suite(self, test_name): if not test_name.startswith('virtual/'): return None for suite in self.virtual_test_suites(): if test_name.startswith(suite.full_prefix): return suite return None def lookup_virtual_test_base(self, test_name): suite = self._lookup_virtual_suite(test_name) if not suite: return None assert test_name.startswith(suite.full_prefix) maybe_base = self.normalize_test_name( test_name[len(suite.full_prefix):]) for base in suite.bases: normalized_base = self.normalize_test_name(base) if normalized_base.startswith(maybe_base) or maybe_base.startswith( normalized_base): return maybe_base return None def _lookup_virtual_test_args(self, test_name): normalized_test_name = self.normalize_test_name(test_name) for suite in self.virtual_test_suites(): if normalized_test_name.startswith(suite.full_prefix): return suite.args return [] def _build_path(self, *comps): """Returns a path from the build directory.""" return self._build_path_with_target(self._options.target, *comps) def _build_path_with_target(self, target, *comps): target = target or self.get_option('target') return self._filesystem.join( self._path_from_chromium_base(), self.get_option('build_directory') or 'out', target, *comps) def _check_driver_build_up_to_date(self, target): # FIXME: We should probably get rid of this check altogether as it has # outlived its usefulness in a GN-based world, but for the moment we # will just check things if they are using the standard Debug or Release # target directories. if target not in ('Debug', 'Release'): return True try: debug_path = self._path_to_driver('Debug') release_path = self._path_to_driver('Release') debug_mtime = self._filesystem.mtime(debug_path) release_mtime = self._filesystem.mtime(release_path) if (debug_mtime > release_mtime and target == 'Release' or release_mtime > debug_mtime and target == 'Debug'): most_recent_binary = 'Release' if target == 'Debug' else 'Debug' _log.warning( 'You are running the %s binary. However the %s binary appears to be more recent. ' 'Please pass --%s.', target, most_recent_binary, most_recent_binary.lower()) _log.warning('') # This will fail if we don't have both a debug and release binary. # That's fine because, in this case, we must already be running the # most up-to-date one. except OSError: pass return True def _get_font_files(self): """Returns list of font files that should be used by the test.""" # TODO(sergeyu): Currently FONT_FILES is valid only on Linux. Make it # usable on other platforms if necessary. result = [] for (font_dirs, font_file, package) in FONT_FILES: exists = False for font_dir in font_dirs: font_path = self._filesystem.join(font_dir, font_file) if not self._filesystem.isabs(font_path): font_path = self._build_path(font_path) if self._check_file_exists(font_path, '', more_logging=False): result.append(font_path) exists = True break if not exists: message = 'You are missing %s under %s.' % (font_file, font_dirs) if package: message += ' Try installing %s. See build instructions.' % package _log.error(message) raise TestRunException(exit_codes.SYS_DEPS_EXIT_STATUS, message) return result @staticmethod def split_webdriver_test_name(test_name): """Splits a WebDriver test name into a filename and a subtest name and returns both of them. E.g. 
test.py>>foo.html -> (test.py, foo.html) test.py -> (test.py, None) """ separator_index = test_name.find(Port.WEBDRIVER_SUBTEST_SEPARATOR) if separator_index == -1: return (test_name, None) webdriver_test_name = test_name[:separator_index] separator_len = len(Port.WEBDRIVER_SUBTEST_SEPARATOR) subtest_suffix = test_name[separator_index + separator_len:] return (webdriver_test_name, subtest_suffix) @staticmethod def add_webdriver_subtest_suffix(test_name, subtest_name): """Appends a subtest name to a WebDriver test name. E.g. (test.py, foo.html) -> test.py>>foo.html (test.py, None) -> test.py """ if subtest_name: return test_name + Port.WEBDRIVER_SUBTEST_SEPARATOR + subtest_name return test_name @staticmethod def split_webdriver_subtest_pytest_name(test_name): """Splits a WebDriver test name in pytest format into a filename and a subtest name and returns both of them. E.g. test.py::foo.html -> (test.py, foo.html) test.py -> (test.py, None) """ names_after_split = test_name.split( Port.WEBDRIVER_SUBTEST_PYTEST_SEPARATOR) assert len(names_after_split) <= 2, \ "%s has a length greater than 2 after split by ::" % (test_name) if len(names_after_split) == 1: return (names_after_split[0], None) return (names_after_split[0], names_after_split[1]) @staticmethod def add_webdriver_subtest_pytest_suffix(test_name, subtest_name): if subtest_name is None: return test_name return test_name + Port.WEBDRIVER_SUBTEST_PYTEST_SEPARATOR + subtest_name class VirtualTestSuite(object): def __init__(self, prefix=None, bases=None, args=None): assert VALID_FILE_NAME_REGEX.match(prefix), \ "Virtual test suite prefix '{}' contains invalid characters".format(prefix) assert isinstance(bases, list) assert args assert isinstance(args, list) self.full_prefix = 'virtual/' + prefix + '/' self.bases = bases self.args = args def __repr__(self): return "VirtualTestSuite('%s', %s, %s)" % (self.full_prefix, self.bases, self.args)<|fim▁end|>
<|file_name|>user_domain_test.py<|end_file_name|><|fim▁begin|># coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for user domain objects."""

from __future__ import absolute_import  # pylint: disable=import-only-modules
from __future__ import unicode_literals  # pylint: disable=import-only-modules

from core.domain import user_domain
from core.tests import test_utils
import feconf
import utils


# This mock class will not be needed once the schema version is >=2 for the
# original class ModifiableUserData. Tests below using this class should also
# be modified then.
class MockModifiableUserData(user_domain.ModifiableUserData):
    """A mock ModifiableUserData class that adds a new attribute to the
    original class to create a new version of the schema, for testing
    migration of an old-schema user data dict to the latest one.
    """

    def __init__(
            self, display_alias, pin, preferred_language_codes,
            preferred_site_language_code, preferred_audio_language_code,
            version, user_id=None, fake_field=None):
        super(MockModifiableUserData, self).__init__(
            display_alias, pin, preferred_language_codes,
            preferred_site_language_code, preferred_audio_language_code,
            version, user_id=None)
        self.fake_field = fake_field

    CURRENT_SCHEMA_VERSION = 2
<|fim▁hole|>
    @classmethod
    def from_dict(cls, modifiable_user_data_dict):
        return MockModifiableUserData(
            modifiable_user_data_dict['display_alias'],
            modifiable_user_data_dict['pin'],
            modifiable_user_data_dict['preferred_language_codes'],
            modifiable_user_data_dict['preferred_site_language_code'],
            modifiable_user_data_dict['preferred_audio_language_code'],
            modifiable_user_data_dict['schema_version'],
            modifiable_user_data_dict['user_id'],
            modifiable_user_data_dict['fake_field']
        )

    # Adding a new method to convert v1 schema data dict to v2.
    @classmethod
    def _convert_v1_dict_to_v2_dict(cls, user_data_dict):
        """Mock function to convert v1 dict to v2."""
        user_data_dict['schema_version'] = 2
        user_data_dict['fake_field'] = 'default_value'
        return user_data_dict

    # Overriding method to first convert a raw user data dict to the latest
    # version, then returning a ModifiableUserData domain object.
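    # The conversion chain below mirrors how schema migrations compose: a v1
    # dict is first upgraded by _convert_v1_dict_to_v2_dict, and only the
    # fully upgraded dict is handed to from_dict.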
    @classmethod
    def from_raw_dict(cls, raw_user_data_dict):
        initial_schema_version = raw_user_data_dict['schema_version']
        data_schema_version = initial_schema_version
        user_data_dict = raw_user_data_dict

        if data_schema_version == 1:
            user_data_dict = cls._convert_v1_dict_to_v2_dict(user_data_dict)

        return MockModifiableUserData.from_dict(user_data_dict)


class UserGlobalPrefsTests(test_utils.GenericTestBase):
    """Test domain object for user global email preferences."""

    def test_initialization(self):
        """Testing init method."""
        user_global_prefs = (user_domain.UserGlobalPrefs(
            True, False, True, False))

        self.assertTrue(user_global_prefs.can_receive_email_updates)
        self.assertFalse(user_global_prefs.can_receive_editor_role_email)
        self.assertTrue(user_global_prefs.can_receive_feedback_message_email)
        self.assertFalse(user_global_prefs.can_receive_subscription_email)

    def test_create_default_prefs(self):
        """Testing create_default_prefs."""
        default_user_global_prefs = (
            user_domain.UserGlobalPrefs.create_default_prefs())

        self.assertEqual(
            default_user_global_prefs.can_receive_email_updates,
            feconf.DEFAULT_EMAIL_UPDATES_PREFERENCE)
        self.assertEqual(
            default_user_global_prefs.can_receive_editor_role_email,
            feconf.DEFAULT_EDITOR_ROLE_EMAIL_PREFERENCE)
        self.assertEqual(
            default_user_global_prefs.can_receive_feedback_message_email,
            feconf.DEFAULT_FEEDBACK_MESSAGE_EMAIL_PREFERENCE)
        self.assertEqual(
            default_user_global_prefs.can_receive_subscription_email,
            feconf.DEFAULT_SUBSCRIPTION_EMAIL_PREFERENCE)


class UserExplorationPrefsTests(test_utils.GenericTestBase):
    """Test domain object for user exploration email preferences."""

    def test_initialization(self):
        """Testing init method."""
        user_exp_prefs = (user_domain.UserExplorationPrefs(
            False, True))
        mute_feedback_notifications = (
            user_exp_prefs.mute_feedback_notifications)
        mute_suggestion_notifications = (
            user_exp_prefs.mute_suggestion_notifications)

        self.assertFalse(mute_feedback_notifications)
        self.assertTrue(mute_suggestion_notifications)

    def test_create_default_prefs(self):
        """Testing create_default_prefs."""
        default_user_exp_prefs = (
            user_domain.UserExplorationPrefs.create_default_prefs())

        self.assertEqual(
            default_user_exp_prefs.mute_feedback_notifications,
            feconf.DEFAULT_FEEDBACK_NOTIFICATIONS_MUTED_PREFERENCE)
        self.assertEqual(
            default_user_exp_prefs.mute_suggestion_notifications,
            feconf.DEFAULT_SUGGESTION_NOTIFICATIONS_MUTED_PREFERENCE)

    def test_to_dict(self):
        """Testing to_dict."""
        user_exp_prefs = (user_domain.UserExplorationPrefs(
            False, True))
        default_user_global_prefs = (
            user_domain.UserExplorationPrefs.create_default_prefs())

        test_dict = user_exp_prefs.to_dict()
        default_dict = default_user_global_prefs.to_dict()

        self.assertEqual(
            test_dict,
            {
                'mute_feedback_notifications': False,
                'mute_suggestion_notifications': True
            }
        )
        self.assertEqual(
            default_dict,
            {
                'mute_feedback_notifications':
                feconf.DEFAULT_FEEDBACK_NOTIFICATIONS_MUTED_PREFERENCE,
                'mute_suggestion_notifications':
                feconf.DEFAULT_SUGGESTION_NOTIFICATIONS_MUTED_PREFERENCE
            }
        )


class ExpUserLastPlaythroughTests(test_utils.GenericTestBase):
    """Testing domain object for an exploration last playthrough model."""

    def test_initialization(self):
        """Testing init method."""
        exp_last_playthrough = (user_domain.ExpUserLastPlaythrough(
            'user_id0', 'exp_id0', 0, 'last_updated', 'state0'))

        self.assertEqual(
            exp_last_playthrough.id, 'user_id0.exp_id0')
        self.assertEqual(
            exp_last_playthrough.user_id, 'user_id0')
        self.assertEqual(
            exp_last_playthrough.exploration_id, 'exp_id0')
        self.assertEqual(
exp_last_playthrough.last_played_exp_version, 0) self.assertEqual( exp_last_playthrough.last_updated, 'last_updated') self.assertEqual( exp_last_playthrough.last_played_state_name, 'state0') def test_update_last_played_information(self): """Testing update_last_played_information.""" exp_last_playthrough = (user_domain.ExpUserLastPlaythrough( 'user_id0', 'exp_id0', 0, 'last_updated', 'state0')) self.assertEqual( exp_last_playthrough.last_played_exp_version, 0) self.assertEqual( exp_last_playthrough.last_played_state_name, 'state0') exp_last_playthrough.update_last_played_information(1, 'state1') self.assertEqual( exp_last_playthrough.last_played_exp_version, 1) self.assertEqual( exp_last_playthrough.last_played_state_name, 'state1') class IncompleteActivitiesTests(test_utils.GenericTestBase): """Testing domain object for incomplete activities model.""" def test_initialization(self): """Testing init method.""" incomplete_activities = (user_domain.IncompleteActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertEqual(incomplete_activities.id, 'user_id0') self.assertListEqual( incomplete_activities.exploration_ids, ['exp_id0']) self.assertListEqual( incomplete_activities.collection_ids, ['collect_id0']) def test_add_exploration_id(self): """Testing add_exploration_id.""" incomplete_activities = (user_domain.IncompleteActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( incomplete_activities.exploration_ids, ['exp_id0']) incomplete_activities.add_exploration_id('exp_id1') self.assertListEqual( incomplete_activities.exploration_ids, ['exp_id0', 'exp_id1']) def test_remove_exploration_id(self): """Testing remove_exploration_id.""" incomplete_activities = (user_domain.IncompleteActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( incomplete_activities.exploration_ids, ['exp_id0']) incomplete_activities.remove_exploration_id('exp_id0') self.assertListEqual( incomplete_activities.exploration_ids, []) def test_add_collection_id(self): """Testing add_collection_id.""" incomplete_activities = (user_domain.IncompleteActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( incomplete_activities.collection_ids, ['collect_id0']) incomplete_activities.add_collection_id('collect_id1') self.assertListEqual( incomplete_activities.collection_ids, ['collect_id0', 'collect_id1']) def test_remove_collection_id(self): """Testing remove_collection_id.""" incomplete_activities = (user_domain.IncompleteActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( incomplete_activities.collection_ids, ['collect_id0']) incomplete_activities.remove_collection_id('collect_id0') self.assertListEqual( incomplete_activities.collection_ids, []) class CompletedActivitiesTests(test_utils.GenericTestBase): """Testing domain object for the activities completed.""" def test_initialization(self): """Testing init method.""" completed_activities = (user_domain.CompletedActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertEqual('user_id0', completed_activities.id) self.assertListEqual( completed_activities.exploration_ids, ['exp_id0']) self.assertListEqual( completed_activities.collection_ids, ['collect_id0']) def test_add_exploration_id(self): """Testing add_exploration_id.""" completed_activities = (user_domain.CompletedActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( completed_activities.exploration_ids, ['exp_id0']) completed_activities.add_exploration_id('exp_id1') self.assertListEqual( 
completed_activities.exploration_ids, ['exp_id0', 'exp_id1']) def test_remove_exploration_id(self): """Testing remove_exploration_id.""" completed_activities = (user_domain.CompletedActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( completed_activities.exploration_ids, ['exp_id0']) completed_activities.remove_exploration_id('exp_id0') self.assertListEqual( completed_activities.exploration_ids, []) def test_add_collection_id(self): """Testing add_collection_id.""" completed_activities = (user_domain.CompletedActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( completed_activities.collection_ids, ['collect_id0']) completed_activities.add_collection_id('collect_id1') self.assertListEqual( completed_activities.collection_ids, ['collect_id0', 'collect_id1']) def test_remove_collection_id(self): """Testing remove_collection_id.""" completed_activities = (user_domain.CompletedActivities( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( completed_activities.collection_ids, ['collect_id0']) completed_activities.remove_collection_id('collect_id0') self.assertListEqual( completed_activities.collection_ids, []) class LearnerPlaylistTests(test_utils.GenericTestBase): """Testing domain object for the learner playlist.""" def test_initialization(self): """Testing init method.""" learner_playlist = (user_domain.LearnerPlaylist( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertEqual(learner_playlist.id, 'user_id0') self.assertListEqual( learner_playlist.exploration_ids, ['exp_id0']) self.assertListEqual( learner_playlist.collection_ids, ['collect_id0']) def test_insert_exploration_id_at_given_position(self): """Testing inserting the given exploration id at the given position.""" learner_playlist = (user_domain.LearnerPlaylist( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( learner_playlist.exploration_ids, ['exp_id0']) learner_playlist.insert_exploration_id_at_given_position( 'exp_id1', 1) learner_playlist.insert_exploration_id_at_given_position( 'exp_id2', 1) self.assertListEqual( learner_playlist.exploration_ids, ['exp_id0', 'exp_id2', 'exp_id1']) def test_add_exploration_id_to_list(self): """Testing add_exploration_id_to_list.""" learner_playlist = (user_domain.LearnerPlaylist( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( learner_playlist.exploration_ids, ['exp_id0']) learner_playlist.add_exploration_id_to_list('exp_id1') self.assertListEqual( learner_playlist.exploration_ids, ['exp_id0', 'exp_id1']) def test_insert_collection_id_at_given_position(self): """Testing insert_exploration_id_at_given_position.""" learner_playlist = (user_domain.LearnerPlaylist( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( learner_playlist.collection_ids, ['collect_id0']) learner_playlist.insert_collection_id_at_given_position( 'collect_id1', 1) learner_playlist.insert_collection_id_at_given_position( 'collect_id2', 1) self.assertListEqual( learner_playlist.collection_ids, ['collect_id0', 'collect_id2', 'collect_id1']) def test_add_collection_id_list(self): """Testing add_collection_id.""" learner_playlist = (user_domain.LearnerPlaylist( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( learner_playlist.collection_ids, ['collect_id0']) learner_playlist.add_collection_id_to_list('collect_id1') self.assertListEqual( learner_playlist.collection_ids, ['collect_id0', 'collect_id1']) def test_remove_exploration_id(self): """Testing remove_exploration_id.""" learner_playlist = 
(user_domain.LearnerPlaylist( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( learner_playlist.exploration_ids, ['exp_id0']) learner_playlist.remove_exploration_id('exp_id0') self.assertListEqual( learner_playlist.exploration_ids, []) def test_remove_collection_id(self): """Testing remove_collection_id.""" learner_playlist = (user_domain.LearnerPlaylist( 'user_id0', ['exp_id0'], ['collect_id0'])) self.assertListEqual( learner_playlist.collection_ids, ['collect_id0']) learner_playlist.remove_collection_id('collect_id0') self.assertListEqual( learner_playlist.collection_ids, []) class UserContributionProficiencyTests(test_utils.GenericTestBase): """Testing domain object for user contribution scoring model.""" def setUp(self): super(UserContributionProficiencyTests, self).setUp() self.user_proficiency = user_domain.UserContributionProficiency( 'user_id0', 'category0', 0, False) def test_initialization(self): """Testing init method.""" self.assertEqual(self.user_proficiency.user_id, 'user_id0') self.assertEqual( self.user_proficiency.score_category, 'category0') self.assertEqual(self.user_proficiency.score, 0) self.assertEqual( self.user_proficiency.onboarding_email_sent, False) def test_increment_score(self): self.assertEqual(self.user_proficiency.score, 0) self.user_proficiency.increment_score(4) self.assertEqual(self.user_proficiency.score, 4) self.user_proficiency.increment_score(-3) self.assertEqual(self.user_proficiency.score, 1) def test_can_user_review_category(self): self.assertEqual(self.user_proficiency.score, 0) self.assertFalse(self.user_proficiency.can_user_review_category()) self.user_proficiency.increment_score( feconf.MINIMUM_SCORE_REQUIRED_TO_REVIEW) self.assertTrue(self.user_proficiency.can_user_review_category()) def test_mark_onboarding_email_as_sent(self): self.assertFalse(self.user_proficiency.onboarding_email_sent) self.user_proficiency.mark_onboarding_email_as_sent() self.assertTrue(self.user_proficiency.onboarding_email_sent) class UserContributionRightsTests(test_utils.GenericTestBase): """Testing UserContributionRights domain object.""" def setUp(self): super(UserContributionRightsTests, self).setUp() self.user_contribution_rights = user_domain.UserContributionRights( 'user_id', ['hi'], [], True) def test_initialization(self): """Testing init method.""" self.assertEqual(self.user_contribution_rights.id, 'user_id') self.assertEqual( self.user_contribution_rights .can_review_translation_for_language_codes, ['hi']) self.assertEqual( self.user_contribution_rights .can_review_voiceover_for_language_codes, []) self.assertEqual( self.user_contribution_rights.can_review_questions, True) def test_can_review_translation_for_language_codes_incorrect_type(self): self.user_contribution_rights.can_review_translation_for_language_codes = 5 # pylint: disable=line-too-long with self.assertRaisesRegexp( utils.ValidationError, 'Expected can_review_translation_for_language_codes to be a list'): self.user_contribution_rights.validate() def test_can_review_voiceover_for_language_codes_incorrect_type(self): self.user_contribution_rights.can_review_voiceover_for_language_codes = 5 # pylint: disable=line-too-long with self.assertRaisesRegexp( utils.ValidationError, 'Expected can_review_voiceover_for_language_codes to be a list'): self.user_contribution_rights.validate() def test_incorrect_language_code_for_voiceover_raise_error(self): self.user_contribution_rights.can_review_voiceover_for_language_codes = [ # pylint: disable=line-too-long 'invalid_lang_code'] with 
self.assertRaisesRegexp( utils.ValidationError, 'Invalid language_code: invalid_lang_code'): self.user_contribution_rights.validate() def test_incorrect_language_code_for_translation_raise_error(self): self.user_contribution_rights.can_review_translation_for_language_codes = [ # pylint: disable=line-too-long 'invalid_lang_code'] with self.assertRaisesRegexp( utils.ValidationError, 'Invalid language_code: invalid_lang_code'): self.user_contribution_rights.validate() def test_can_review_voiceover_for_language_codes_with_duplicate_values( self): self.user_contribution_rights.can_review_voiceover_for_language_codes = [ # pylint: disable=line-too-long 'hi'] self.user_contribution_rights.validate() self.user_contribution_rights.can_review_voiceover_for_language_codes = [ # pylint: disable=line-too-long 'hi', 'hi'] with self.assertRaisesRegexp( utils.ValidationError, 'Expected can_review_voiceover_for_language_codes list not to have ' 'duplicate values'): self.user_contribution_rights.validate() def test_can_review_translation_for_language_codes_with_duplicate_values( self): self.user_contribution_rights.can_review_translation_for_language_codes = [ # pylint: disable=line-too-long 'hi'] self.user_contribution_rights.validate() self.user_contribution_rights.can_review_translation_for_language_codes = [ # pylint: disable=line-too-long 'hi', 'hi'] with self.assertRaisesRegexp( utils.ValidationError, 'Expected can_review_translation_for_language_codes list not to ' 'have duplicate values'): self.user_contribution_rights.validate() def test_incorrect_type_for_can_review_questions_raise_error(self): self.user_contribution_rights.can_review_questions = 5 with self.assertRaisesRegexp( utils.ValidationError, 'Expected can_review_questions to be a boolean value'): self.user_contribution_rights.validate() class ModifiableUserDataTests(test_utils.GenericTestBase): """Testing domain object for modifiable user data.""" def test_initialization_with_none_user_id_is_successful(self): """Testing init method user id set None.""" schema_version = 1 user_data_dict = { 'schema_version': 1, 'display_alias': 'display_alias', 'pin': '123', 'preferred_language_codes': 'preferred_language_codes', 'preferred_site_language_code': 'preferred_site_language_code', 'preferred_audio_language_code': 'preferred_audio_language_code', 'user_id': None, } modifiable_user_data = ( user_domain.ModifiableUserData.from_raw_dict(user_data_dict) ) self.assertEqual( modifiable_user_data.display_alias, 'display_alias') self.assertEqual(modifiable_user_data.pin, '123') self.assertEqual( modifiable_user_data.preferred_language_codes, 'preferred_language_codes' ) self.assertEqual( modifiable_user_data.preferred_site_language_code, 'preferred_site_language_code' ) self.assertEqual( modifiable_user_data.preferred_audio_language_code, 'preferred_audio_language_code' ) self.assertIsNone(modifiable_user_data.user_id) self.assertEqual(modifiable_user_data.version, schema_version) def test_initialization_with_valid_user_id_is_successful(self): """Testing init method with a valid user id set.""" schema_version = 1 user_data_dict = { 'schema_version': 1, 'display_alias': 'display_alias', 'pin': '123', 'preferred_language_codes': 'preferred_language_codes', 'preferred_site_language_code': 'preferred_site_language_code', 'preferred_audio_language_code': 'preferred_audio_language_code', 'user_id': 'user_id', } modifiable_user_data = ( user_domain.ModifiableUserData.from_raw_dict(user_data_dict) ) self.assertEqual( modifiable_user_data.display_alias, 
'display_alias') self.assertEqual(modifiable_user_data.pin, '123') self.assertEqual( modifiable_user_data.preferred_language_codes, 'preferred_language_codes' ) self.assertEqual( modifiable_user_data.preferred_site_language_code, 'preferred_site_language_code' ) self.assertEqual( modifiable_user_data.preferred_audio_language_code, 'preferred_audio_language_code' ) self.assertEqual(modifiable_user_data.user_id, 'user_id') self.assertEqual(modifiable_user_data.version, schema_version) # This test should be modified to use the original class ModifiableUserData # itself when the CURRENT_SCHEMA_VERSION has been updated to 2 or higher. def test_mock_modifiable_user_data_class_with_all_attributes_given(self): user_data_dict = { 'schema_version': 2, 'display_alias': 'name', 'pin': '123', 'preferred_language_codes': ['en', 'es'], 'preferred_site_language_code': 'es', 'preferred_audio_language_code': 'en', 'user_id': None, 'fake_field': 'set_value' } modifiable_user_data = ( MockModifiableUserData.from_raw_dict(user_data_dict)) self.assertEqual(modifiable_user_data.display_alias, 'name') self.assertEqual(modifiable_user_data.pin, '123') self.assertEqual( modifiable_user_data.preferred_language_codes, ['en', 'es']) self.assertEqual( modifiable_user_data.preferred_site_language_code, 'es') self.assertEqual( modifiable_user_data.preferred_audio_language_code, 'en') self.assertEqual(modifiable_user_data.fake_field, 'set_value') self.assertEqual(modifiable_user_data.user_id, None) self.assertEqual(modifiable_user_data.version, 2) # This test should be modified to use the original class ModifiableUserData # itself when the CURRENT_SCHEMA_VERSION has been updated to 2 or higher. def test_mock_migration_from_old_version_to_new_works_correctly(self): user_data_dict = { 'schema_version': 1, 'display_alias': 'name', 'pin': '123', 'preferred_language_codes': ['en', 'es'], 'preferred_site_language_code': 'es', 'preferred_audio_language_code': 'en', 'user_id': None } modifiable_user_data = MockModifiableUserData.from_raw_dict( user_data_dict) self.assertEqual(modifiable_user_data.display_alias, 'name') self.assertEqual(modifiable_user_data.pin, '123') self.assertEqual( modifiable_user_data.preferred_language_codes, ['en', 'es']) self.assertEqual( modifiable_user_data.preferred_site_language_code, 'es') self.assertEqual( modifiable_user_data.preferred_audio_language_code, 'en') self.assertEqual(modifiable_user_data.fake_field, 'default_value') self.assertEqual(modifiable_user_data.user_id, None) self.assertEqual(modifiable_user_data.version, 2)<|fim▁end|>
# Overriding method to add a new attribute named 'fake_field'.
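The mock above exercises a simple schema-upgrade pattern. For reference, a minimal framework-free sketch of that pattern (function name and field values are illustrative, not Oppia's actual API): a version-1 dict gains the new attribute with its default and has its schema_version bumped to 2.

def migrate_user_data_dict(user_data_dict):
    # Upgrade a raw user-data dict from schema version 1 to version 2 by
    # adding the new attribute with its default value.
    if user_data_dict.get('schema_version') == 1:
        user_data_dict.setdefault('fake_field', 'default_value')
        user_data_dict['schema_version'] = 2
    return user_data_dict

assert migrate_user_data_dict({'schema_version': 1})['fake_field'] == 'default_value'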
<|file_name|>test_tlslite_utils_ecc.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014, Hubert Kario # # See the LICENSE file for legal information regarding use of this file. # compatibility with Python 2.6, for that we need unittest2 package, # which is not available on 3.3 or 3.4 try: import unittest2 as unittest except ImportError: import unittest from tlslite.utils.ecc import decodeX962Point, encodeX962Point, getCurveByName,\ getPointByteSize import ecdsa class TestEncoder(unittest.TestCase): def test_encode_P_256_point(self): point = ecdsa.NIST256p.generator * 200 self.assertEqual(encodeX962Point(point), bytearray(b'\x04' # x coordinate b'\x3a\x53\x5b\xd0\xbe\x46\x6f\xf3\xd8\x56'<|fim▁hole|> b'\x3b\x34' # y coordinate b'\x66\xab\xa8\x18\x5b\x33\x41\xe0\xc2\xe3' b'\xd1\xb3\xae\x69\xe4\x7d\x0f\x01\xd4\xbb' b'\xd7\x06\xd9\x57\x8b\x0b\x65\xd6\xd3\xde' b'\x1e\xfe' )) def test_encode_P_256_point_with_zero_first_byte_on_x(self): point = ecdsa.NIST256p.generator * 379 self.assertEqual(encodeX962Point(point), bytearray(b'\x04' b'\x00\x55\x43\x89\x4a\xf3\xd0\x0e\xd7\xd7' b'\x40\xab\xdb\xd7\x5c\x96\xb0\x68\x77\xb7' b'\x87\xdb\x5f\x70\xee\xa7\x8b\x90\xa8\xd7' b'\xc0\x0a' b'\xbb\x4c\x85\xa3\xd8\xea\x29\xef\xaa\xfa' b'\x24\x40\x69\x12\xdd\x84\xd5\xb1\x4d\xc3' b'\x2b\xf6\x56\xef\x6c\x6b\xd5\x8a\x5d\x94' b'\x3f\x92' )) def test_encode_P_256_point_with_zero_first_byte_on_y(self): point = ecdsa.NIST256p.generator * 43 self.assertEqual(encodeX962Point(point), bytearray(b'\x04' b'\x98\x6a\xe2\x50\x6f\x1f\xf1\x04\xd0\x42' b'\x30\x86\x1d\x8f\x4b\x49\x8f\x4b\xc4\xc6' b'\xd0\x09\xb3\x0f\x75\x44\xdc\x12\x9b\x82' b'\xd2\x8d' b'\x00\x3c\xcc\xc0\xa6\x46\x0e\x0a\xe3\x28' b'\xa4\xd9\x7d\x3c\x7b\x61\xd8\x6f\xc6\x28' b'\x9c\x18\x9f\x25\x25\x11\x0c\x44\x1b\xb0' b'\x7e\x97' )) def test_encode_P_256_point_with_two_zero_first_bytes_on_x(self): point = ecdsa.NIST256p.generator * 40393 self.assertEqual(encodeX962Point(point), bytearray(b'\x04' b'\x00\x00\x3f\x5f\x17\x8a\xa0\x70\x6c\x42' b'\x31\xeb\x6e\x54\x95\xaa\x16\x42\xc5\xb8' b'\xa9\x94\x12\x7c\x89\x46\x5f\x22\x99\x4a' b'\x42\xf9' b'\xc2\x48\xb3\x37\x59\x9f\x0c\x2f\x29\x77' b'\x2e\x25\x6f\x1d\x55\x49\xc8\x9b\xa9\xe5' b'\x73\x13\x82\xcd\x1e\x3c\xc0\x9d\x10\xd0' b'\x0b\x55')) def test_encode_P_521_point(self): point = ecdsa.NIST521p.generator * 200 self.assertEqual(encodeX962Point(point), bytearray(b'\x04' b'\x00\x3e\x2a\x2f\x9f\xd5\x9f\xc3\x8d\xfb' b'\xde\x77\x26\xa0\xbf\xc6\x48\x2a\x6b\x2a' b'\x86\xf6\x29\xb8\x34\xa0\x6c\x3d\x66\xcd' b'\x79\x8d\x9f\x86\x2e\x89\x31\xf7\x10\xc7' b'\xce\x89\x15\x9f\x35\x8b\x4a\x5c\x5b\xb3' b'\xd2\xcc\x9e\x1b\x6e\x94\x36\x23\x6d\x7d' b'\x6a\x5e\x00\xbc\x2b\xbe' b'\x01\x56\x7a\x41\xcb\x48\x8d\xca\xd8\xe6' b'\x3a\x3f\x95\xb0\x8a\xf6\x99\x2a\x69\x6a' b'\x37\xdf\xc6\xa1\x93\xff\xbc\x3f\x91\xa2' b'\x96\xf3\x3c\x66\x15\x57\x3c\x1c\x06\x7f' b'\x0a\x06\x4d\x18\xbd\x0c\x81\x4e\xf7\x2a' b'\x8f\x76\xf8\x7f\x9b\x7d\xff\xb2\xf4\x26' b'\x36\x43\x43\x86\x11\x89')) class TestDecoder(unittest.TestCase): def test_decode_P_256_point(self): point = ecdsa.NIST256p.generator * 379 data = bytearray(b'\x04' b'\x00\x55\x43\x89\x4a\xf3\xd0\x0e\xd7\xd7' b'\x40\xab\xdb\xd7\x5c\x96\xb0\x68\x77\xb7' b'\x87\xdb\x5f\x70\xee\xa7\x8b\x90\xa8\xd7' b'\xc0\x0a' b'\xbb\x4c\x85\xa3\xd8\xea\x29\xef\xaa\xfa' b'\x24\x40\x69\x12\xdd\x84\xd5\xb1\x4d\xc3' b'\x2b\xf6\x56\xef\x6c\x6b\xd5\x8a\x5d\x94' b'\x3f\x92' ) decoded_point = decodeX962Point(data, ecdsa.NIST256p) self.assertEqual(point, decoded_point) def test_decode_P_521_point(self): data = bytearray(b'\x04' 
b'\x01\x7d\x8a\x5d\x11\x03\x4a\xaf\x01\x26' b'\x5f\x2d\xd6\x2d\x76\xeb\xd8\xbe\x4e\xfb' b'\x3b\x4b\xd2\x05\x5a\xed\x4c\x6d\x20\xc7' b'\xf3\xd7\x08\xab\x21\x9e\x34\xfd\x14\x56' b'\x3d\x47\xd0\x02\x65\x15\xc2\xdd\x2d\x60' b'\x66\xf9\x15\x64\x55\x7a\xae\x56\xa6\x7a' b'\x28\x51\x65\x26\x5c\xcc' b'\x01\xd4\x19\x56\xfa\x14\x6a\xdb\x83\x1c' b'\xb6\x1a\xc4\x4b\x40\xb1\xcb\xcc\x9e\x4f' b'\x57\x2c\xb2\x72\x70\xb9\xef\x38\x15\xae' b'\x87\x1f\x85\x40\x94\xda\x69\xed\x97\xeb' b'\xdc\x72\x25\x25\x61\x76\xb2\xde\xed\xa2' b'\xb0\x5c\xca\xc4\x83\x8f\xfb\x54\xae\xe0' b'\x07\x45\x0b\xbf\x7c\xfc') point = decodeX962Point(data, ecdsa.NIST521p) self.assertIsNotNone(point) self.assertEqual(encodeX962Point(point), data) def test_decode_with_missing_data(self): data = bytearray(b'\x04' b'\x00\x55\x43\x89\x4a\xf3\xd0\x0e\xd7\xd7' b'\x40\xab\xdb\xd7\x5c\x96\xb0\x68\x77\xb7' b'\x87\xdb\x5f\x70\xee\xa7\x8b\x90\xa8\xd7' b'\xc0\x0a' b'\xbb\x4c\x85\xa3\xd8\xea\x29\xef\xaa\xfa' b'\x24\x40\x69\x12\xdd\x84\xd5\xb1\x4d\xc3' b'\x2b\xf6\x56\xef\x6c\x6b\xd5\x8a\x5d\x94' #b'\x3f\x92' ) # XXX will change later as decoder in tlslite-ng needs to be updated with self.assertRaises(SyntaxError): decodeX962Point(data, ecdsa.NIST256p) class TestCurveLookup(unittest.TestCase): def test_with_correct_name(self): curve = getCurveByName('secp256r1') self.assertIs(curve, ecdsa.NIST256p) def test_with_invalid_name(self): with self.assertRaises(ValueError): getCurveByName('NIST256p') class TestGetPointByteSize(unittest.TestCase): def test_with_curve(self): self.assertEqual(getPointByteSize(ecdsa.NIST256p), 32) def test_with_point(self): self.assertEqual(getPointByteSize(ecdsa.NIST384p.generator * 10), 48) def test_with_invalid_argument(self): with self.assertRaises(ValueError): getPointByteSize("P-256")<|fim▁end|>
b'\xa0\x77\xaa\xd9\x50\x4f\x16\xaa\x5d\x52' b'\x28\xfc\xd7\xc2\x77\x48\x85\xee\x21\x3f'
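For orientation, the byte strings in these tests follow the X9.62 uncompressed-point layout: a 0x04 tag followed by the X and Y coordinates as fixed-width big-endian integers (32 bytes each for P-256, 66 for P-521). A small Python 3 sketch of that layout - it is not tlslite-ng's actual encoder - looks like this:

def encode_uncompressed_point(x, y, coord_bytes=32):
    # 0x04 marks the uncompressed form; both coordinates are left-padded
    # to the curve's field size.
    return (bytearray(b'\x04')
            + x.to_bytes(coord_bytes, 'big')
            + y.to_bytes(coord_bytes, 'big'))

# A P-256 point therefore always encodes to 1 + 32 + 32 = 65 bytes.
assert len(encode_uncompressed_point(1, 2)) == 65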
<|file_name|>qtmain.cpp<|end_file_name|><|fim▁begin|>#include <QApplication> #include "qtreversimenudialog.h" int main(int argc, char *argv[]) { QApplication a(argc, argv); ribi::reversi::QtReversiMenuDialog w; w.show();<|fim▁hole|> return a.exec(); }<|fim▁end|>
<|file_name|>post.js<|end_file_name|><|fim▁begin|>const mongoose = require('mongoose') const TABLE_NAME = 'Post' const Schema = mongoose.Schema const ObjectId = Schema.Types.ObjectId const escape = (require('../utils')).escape const PostSchema = new Schema({ //type type: { type: String, default: 'post' // post | page }, //title title: { type: String, trim: true, set: escape }, //alias alias: { type: String, trim: true, set: escape }, //creator user: { type: ObjectId, ref: 'User'<|fim▁hole|>
},
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A bare-metal library supplying functions rustc may lower code to //! //! This library is not intended for general use, and is superseded by a system //! libc if one is available. In a freestanding context, however, common //! functions such as memset, memcpy, etc are not implemented. This library //! provides an implementation of these functions which are either required by //! libcore or called by rustc implicitly. //! //! This library is never included by default, and must be manually included if //! necessary. It is an error to include this library when also linking with //! the system libc library. #![crate_id = "rlibc#0.11.0-pre"] #![license = "MIT/ASL2"] #![crate_type = "rlib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico",<|fim▁hole|> html_root_url = "http://static.rust-lang.org/doc/master")] #![no_std] #![experimental] // This library is definining the builtin functions, so it would be a shame for // LLVM to optimize these function calls to themselves! #![no_builtins] #[cfg(test)] extern crate std; #[cfg(test)] extern crate native; // Require the offset intrinsics for LLVM to properly optimize the // implementations below. If pointer arithmetic is done through integers the // optimizations start to break down. extern "rust-intrinsic" { fn offset<T>(dst: *T, offset: int) -> *T; } #[no_mangle] pub unsafe extern "C" fn memcpy(dest: *mut u8, src: *u8, n: uint) -> *mut u8 { let mut i = 0; while i < n { *(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int); i += 1; } return dest; } #[no_mangle] pub unsafe extern "C" fn memmove(dest: *mut u8, src: *u8, n: uint) -> *mut u8 { if src < dest as *u8 { // copy from end let mut i = n; while i != 0 { i -= 1; *(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int); } } else { // copy from beginning let mut i = 0; while i < n { *(offset(dest as *u8, i as int) as *mut u8) = *offset(src, i as int); i += 1; } } return dest; } #[no_mangle] pub unsafe extern "C" fn memset(s: *mut u8, c: i32, n: uint) -> *mut u8 { let mut i = 0; while i < n { *(offset(s as *u8, i as int) as *mut u8) = c as u8; i += 1; } return s; } #[no_mangle] pub unsafe extern "C" fn memcmp(s1: *u8, s2: *u8, n: uint) -> i32 { let mut i = 0; while i < n { let a = *offset(s1, i as int); let b = *offset(s2, i as int); if a != b { return (a - b) as i32 } i += 1; } return 0; } #[test] fn work_on_windows() { } // FIXME #10872 needed for a happy windows<|fim▁end|>
<|file_name|>chassis_1_0_0_chassis_actions.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ Copyright 2015 SmartBear Software Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Ref: https://github.com/swagger-api/swagger-codegen """ from pprint import pformat from six import iteritems class Chassis100ChassisActions(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ def __init__(self): """ Chassis100ChassisActions - a model defined in Swagger :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMap: The key is attribute name and the value is json key in definition.<|fim▁hole|> self.swagger_types = { 'oem': 'object', 'chassis_reset': 'Chassis100Reset' } self.attribute_map = { 'oem': 'Oem', 'chassis_reset': '#Chassis.Reset' } self._oem = None self._chassis_reset = None @property def oem(self): """ Gets the oem of this Chassis100ChassisActions. :return: The oem of this Chassis100ChassisActions. :rtype: object """ return self._oem @oem.setter def oem(self, oem): """ Sets the oem of this Chassis100ChassisActions. :param oem: The oem of this Chassis100ChassisActions. :type: object """ self._oem = oem @property def chassis_reset(self): """ Gets the chassis_reset of this Chassis100ChassisActions. :return: The chassis_reset of this Chassis100ChassisActions. :rtype: Chassis100Reset """ return self._chassis_reset @chassis_reset.setter def chassis_reset(self, chassis_reset): """ Sets the chassis_reset of this Chassis100ChassisActions. :param chassis_reset: The chassis_reset of this Chassis100ChassisActions. :type: Chassis100Reset """ self._chassis_reset = chassis_reset def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other<|fim▁end|>
"""
<|file_name|>forloop.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python # Joe Deller 2014 # Using for loops # Level : Beginner # Uses : Libraries, variables, operators, loops # Loops are a very important part of programming # The for loop is a very common loop # It counts from a starting number to a finishing number # It normally counts up in ones, but you can count up # in any number you want, or count downwards # # The wool block in minecraft can be any one of 16 different colours # from 0, a white block, to 15, a black block # This program uses a for loop to draw wool blocks # of all 16 different colours # It also uses the for loop to set where the block is drawn # so we can see all 16 colours import mcpi.minecraft as minecraft import mcpi.block as block import time<|fim▁hole|># set us down in the middle of the world mc = minecraft.Minecraft.create() x, y, z = mc.player.getPos() mc.setBlocks(x - 20, y, z - 20, x + 20, y + 20, z + 20, block.AIR) mc.setBlocks(z - 20, y - 1, z - 20, y, z + 20, block.GRASS.id) for colour in range(0, 15): # draw upwards mc.setBlock(x + 15, y + 2 + colour, z + 2, block.WOOL.id, colour) # draw across mc.setBlock(x + colour, y + 2, z + 2, block.WOOL.id, colour) time.sleep(.5) # Counting backwards, using a negative number to say how quickly to count backwards # Try changing this to -2 and see what happens for colour in range(15, 0, -1): mc.setBlock(x, 1 + colour, z + 2, block.WOOL.id, colour) mc.setBlock(colour, y + 16, z + 2, block.WOOL.id, colour) time.sleep(.1)<|fim▁end|>
# Setup the connection and clear a space
<|file_name|>bigip_iapp_service.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright 2017 F5 Networks Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = { 'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.0' } DOCUMENTATION = ''' --- module: bigip_iapp_service short_description: Manages TCL iApp services on a BIG-IP. description: - Manages TCL iApp services on a BIG-IP. version_added: "2.4" options: name: description: - The name of the iApp service that you want to deploy. required: True template: description: - The iApp template from which to instantiate a new service. This template must exist on your BIG-IP before you can successfully create a service. This parameter is required if the C(state) parameter is C(present). parameters: description: - A hash of all the required template variables for the iApp template. If your parameters are stored in a file (the more common scenario) it is recommended you use either the `file` or `template` lookups to supply the expected parameters. force: description: - Forces the updating of an iApp service even if the parameters to the service have not changed. This option is of particular importance if the iApp template that underlies the service has been updated in-place. This option is equivalent to re-configuring the iApp if that template has changed. default: False state: description: - When C(present), ensures that the iApp service is created and running. When C(absent), ensures that the iApp service has been removed. default: present choices: - present - absent notes: - Requires the f5-sdk Python package on the host. This is as easy as pip install f5-sdk. - Requires the deepdiff Python package on the host. This is as easy as pip install f5-sdk. 
requirements: - f5-sdk - deepdiff extends_documentation_fragment: f5 author: - Tim Rupp (@caphrim007) ''' EXAMPLES = ''' - name: Create HTTP iApp service from iApp template bigip_iapp_service: name: "foo-service" template: "f5.http" parameters: "{{ lookup('file', 'f5.http.parameters.json') }}" password: "secret" server: "lb.mydomain.com" state: "present" user: "admin" delegate_to: localhost - name: Upgrade foo-service to v1.2.0rc4 of the f5.http template bigip_iapp_service: name: "foo-service" template: "f5.http.v1.2.0rc4" password: "secret" server: "lb.mydomain.com" state: "present" user: "admin" delegate_to: localhost - name: Configure a service using parameters in YAML bigip_iapp_service: name: "tests" template: "web_frontends" password: "admin" server: "{{ inventory_hostname }}" server_port: "{{ bigip_port }}" validate_certs: "{{ validate_certs }}" state: "present" user: "admin" parameters: variables: - name: "var__vs_address" value: "1.1.1.1" - name: "pm__apache_servers_for_http" value: "2.2.2.1:80" - name: "pm__apache_servers_for_https" value: "2.2.2.2:80" delegate_to: localhost - name: Re-configure a service whose underlying iApp was updated in place bigip_iapp_service: name: "tests" template: "web_frontends" password: "admin" force: yes server: "{{ inventory_hostname }}" server_port: "{{ bigip_port }}" validate_certs: "{{ validate_certs }}" state: "present" user: "admin" parameters: variables: - name: "var__vs_address" value: "1.1.1.1" - name: "pm__apache_servers_for_http" value: "2.2.2.1:80" - name: "pm__apache_servers_for_https" value: "2.2.2.2:80" delegate_to: localhost ''' RETURN = ''' # only common fields returned ''' from ansible.module_utils.f5_utils import ( AnsibleF5Client, AnsibleF5Parameters, HAS_F5SDK, F5ModuleError, iteritems, iControlUnexpectedHTTPError ) from deepdiff import DeepDiff class Parameters(AnsibleF5Parameters): returnables = [] api_attributes = [ 'tables', 'variables', 'template', 'lists' ] updatables = ['tables', 'variables', 'lists'] def to_return(self): result = {} for returnable in self.returnables: result[returnable] = getattr(self, returnable) result = self._filter_params(result) return result def api_params(self): result = {} for api_attribute in self.api_attributes: if self.api_map is not None and api_attribute in self.api_map: result[api_attribute] = getattr(self, self.api_map[api_attribute]) else: result[api_attribute] = getattr(self, api_attribute) result = self._filter_params(result) return result @property def tables(self): result = [] if not self._values['tables']: return None tables = self._values['tables'] for table in tables: tmp = dict() name = table.get('name', None) if name is None: raise F5ModuleError( "One of the provided tables does not have a name" ) tmp['name'] = str(name) columns = table.get('columnNames', None) if columns: tmp['columnNames'] = [str(x) for x in columns] # You cannot have rows without columns rows = table.get('rows', None) if rows: tmp['rows'] = [] for row in rows: tmp['rows'].append(dict(row=[str(x) for x in row['row']])) result.append(tmp) result = sorted(result, key=lambda k: k['name']) return result @tables.setter def tables(self, value): self._values['tables'] = value @property def variables(self): result = [] if not self._values['variables']: return None variables = self._values['variables'] for variable in variables: tmp = dict((str(k), str(v)) for k, v in iteritems(variable)) if 'encrypted' not in tmp: # BIG-IP will inject an 'encrypted' key if you don't provide one. 
# If you don't provide one, then we give you the default 'no', by # default. tmp['encrypted'] = 'no' if 'value' not in tmp: tmp['value'] = '' # This seems to happen only on 12.0.0 elif tmp['value'] == 'none': tmp['value'] = '' result.append(tmp) result = sorted(result, key=lambda k: k['name']) return result @variables.setter def variables(self, value): self._values['variables'] = value @property def lists(self): result = [] if not self._values['lists']: return None lists = self._values['lists'] for list in lists: tmp = dict((str(k), str(v)) for k, v in iteritems(list) if k != 'value') if 'encrypted' not in list: # BIG-IP will inject an 'encrypted' key if you don't provide one. # If you don't provide one, then we give you the default 'no', by # default. tmp['encrypted'] = 'no' if 'value' in list: if len(list['value']) > 0: # BIG-IP removes empty values entries, so mimic this behavior # for user-supplied values. tmp['value'] = [str(x) for x in list['value']] result.append(tmp) result = sorted(result, key=lambda k: k['name']) return result @lists.setter def lists(self, value): self._values['lists'] = value @property def parameters(self): return dict( tables=self.tables, variables=self.variables, lists=self.lists ) @parameters.setter def parameters(self, value): if value is None: return if 'tables' in value: self.tables = value['tables'] if 'variables' in value: self.variables = value['variables'] if 'lists' in value: self.lists = value['lists'] @property def template(self): if self._values['template'] is None: return None if self._values['template'].startswith("/" + self.partition): return self._values['template'] elif self._values['template'].startswith("/"): return self._values['template'] else: return '/{0}/{1}'.format( self.partition, self._values['template'] ) @template.setter def template(self, value): self._values['template'] = value class ModuleManager(object): def __init__(self, client): self.client = client self.have = None self.want = Parameters(self.client.module.params) self.changes = Parameters() def _set_changed_options(self): changed = {} for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = Parameters(changed) def _update_changed_options(self): changed = {} for key in Parameters.updatables: if getattr(self.want, key) is not None: attr1 = getattr(self.want, key) attr2 = getattr(self.have, key) if attr1 != attr2: changed[key] = str(DeepDiff(attr1, attr2)) if changed: self.changes = Parameters(changed) return True return False def exec_module(self): changed = False result = dict() state = self.want.state try: if state == "present": changed = self.present() elif state == "absent": changed = self.absent() except iControlUnexpectedHTTPError as e: raise F5ModuleError(str(e)) changes = self.changes.to_return() result.update(**changes) result.update(dict(changed=changed)) return result def exists(self): result = self.client.api.tm.sys.application.services.service.exists( name=self.want.name, partition=self.want.partition ) return result def present(self): if self.exists(): return self.update() else: return self.create() def create(self): self._set_changed_options() if self.client.check_mode: return True self.create_on_device() return True def update(self): self.have = self.read_current_from_device() if not self.should_update() and not self.want.force: return False if self.client.check_mode: return True self.update_on_device() return True def should_update(self): result = 
self._update_changed_options() if result: return True return False def update_on_device(self): params = self.want.api_params() params['execute-action'] = 'definition' resource = self.client.api.tm.sys.application.services.service.load( name=self.want.name, partition=self.want.partition ) resource.update(**params) def read_current_from_device(self): result = self.client.api.tm.sys.application.services.service.load( name=self.want.name, partition=self.want.partition<|fim▁hole|> return Parameters(result) def create_on_device(self): params = self.want.api_params() self.client.api.tm.sys.application.services.service.create( name=self.want.name, partition=self.want.partition, **params ) def absent(self): if self.exists(): return self.remove() return False def remove(self): if self.client.check_mode: return True self.remove_from_device() if self.exists(): raise F5ModuleError("Failed to delete the iApp service") return True def remove_from_device(self): resource = self.client.api.tm.sys.application.services.service.load( name=self.want.name, partition=self.want.partition ) if resource: resource.delete() class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True self.argument_spec = dict( name=dict(required=True), template=dict(), parameters=dict( type='dict' ), state=dict( default='present', choices=['absent', 'present'] ), force=dict( default=False, type='bool' ) ) self.f5_product_name = 'bigip' def main(): if not HAS_F5SDK: raise F5ModuleError("The python f5-sdk module is required") spec = ArgumentSpec() client = AnsibleF5Client( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode, f5_product_name=spec.f5_product_name ) try: mm = ModuleManager(client) results = mm.exec_module() client.module.exit_json(**results) except F5ModuleError as e: client.module.fail_json(msg=str(e)) if __name__ == '__main__': main()<|fim▁end|>
).to_dict() result.pop('_meta_data', None)
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for sinon-chai 2.7.0 // Project: https://github.com/domenic/sinon-chai // Definitions by: Kazi Manzur Rashid <https://github.com/kazimanzurrashid/>, Jed Mao <https://github.com/jedmao/> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped // TypeScript Version: 2.3 /// <reference types="chai" /> /// <reference types="sinon" /> import * as Sinon from 'sinon'; declare global { export namespace Chai { interface LanguageChains { always: Assertion; } interface Assertion { /** * true if the spy was called at least once. */ called: Assertion; /** * @param count The number of recorded calls. */ callCount(count: number): Assertion; /** * true if the spy was called exactly once. */ calledOnce: Assertion; /** * true if the spy was called exactly twice. */ calledTwice: Assertion; /** * true if the spy was called exactly thrice. */ calledThrice: Assertion; /** * Returns true if the spy was called before anotherSpy. */ calledBefore(anotherSpy: Sinon.SinonSpy): Assertion; /** * Returns true if the spy was called after anotherSpy. */<|fim▁hole|> * Returns true if spy/stub was called with the new operator. Beware that * this is inferred based on the value of the this object and the spy * function's prototype, so it may give false positives if you actively * return the right kind of object. */ calledWithNew: Assertion; /** * Returns true if context was this for this call. */ calledOn(context: any): Assertion; /** * Returns true if call received provided arguments (and possibly others). */ calledWith(...args: any[]): Assertion; /** * Returns true if call received provided arguments and no others. */ calledWithExactly(...args: any[]): Assertion; /** * Returns true if call received matching arguments (and possibly others). * This behaves the same as spyCall.calledWith(sinon.match(arg1), sinon.match(arg2), ...). */ calledWithMatch(...args: any[]): Assertion; /** * Returns true if spy returned the provided value at least once. Uses * deep comparison for objects and arrays. Use spy.returned(sinon.match.same(obj)) * for strict comparison (see matchers). */ returned(obj: any): Assertion; /** * Returns true if spy threw the provided exception object at least once. */ thrown(obj?: Error | typeof Error | string): Assertion; } } } declare function sinonChai(chai: any, utils: any): void; declare namespace sinonChai { } export = sinonChai;<|fim▁end|>
calledAfter(anotherSpy: Sinon.SinonSpy): Assertion; /**
<|file_name|>peasypytest.py<|end_file_name|><|fim▁begin|>from __future__ import print_function<|fim▁hole|> import gettext import gi gi.require_version('Peas', '1.0') from gi.repository import GObject from gi.repository import Peas from gi.repository import Peasy from gi.repository import Geany gettext.bindtextdomain("peasy", "/home/kugel/dev/geany.git/build-linux/dest/share/locale") gettext.textdomain("peasy") _ = gettext.gettext class PeasyPyTester(Peasy.Plugin, Peasy.PluginHelp): __gtype_name = 'PeasyPyTester' object = GObject.property(type=GObject.Object) # why is this needed!? plugin_info = GObject.property(type=Peas.PluginInfo) doc = None def on_closed(self, obj, d): print(d.display_name() + " closed") self.doc = None def do_enable(self): print("do_enable: " + gettext.dgettext("peasy", "Hello from %s!") % self.props.plugin_info.get_name()) self.doc = Geany.Document.new_file("foo") self.geany_plugin.geany_data.object.connect("document-close", self.on_closed) return True def do_disable(self): print("do_disable: " + _("%s says bye!") % self.props.plugin_info.get_name()) if (self.doc and self.doc.is_valid): self.doc.close() def do_help(self): print("Help!!")<|fim▁end|>
<|file_name|>index_stories.py<|end_file_name|><|fim▁begin|>import re from django.core.management.base import BaseCommand from django.contrib.auth.models import User from apps.rss_feeds.models import Feed from apps.reader.models import UserSubscription from optparse import make_option class Command(BaseCommand): option_list = BaseCommand.option_list + ( make_option("-u", "--user", dest="user", nargs=1, help="Specify user id or username"), ) def handle(self, *args, **options): if re.match(r"([0-9]+)", options['user']): user = User.objects.get(pk=int(options['user'])) else: user = User.objects.get(username=options['user']) subscriptions = UserSubscription.objects.filter(user=user) print " ---> Indexing %s feeds..." % subscriptions.count() for sub in subscriptions: try:<|fim▁hole|><|fim▁end|>
sub.feed.index_stories_for_search() except Feed.DoesNotExist: print " ***> Couldn't find %s" % sub.feed_id
<|file_name|>swirl.py<|end_file_name|><|fim▁begin|>from bibliopixel.animation.circle import Circle from bibliopixel.colors import palettes class Swirl(Circle): COLOR_DEFAULTS = ('palette', palettes.get('three_sixty')), def __init__(self, layout, angle=12, **kwds): super().__init__(layout, **kwds) self.angle = angle def pre_run(self): self._step = 0 <|fim▁hole|> c = self.palette(self._step) for i in range(self.ringCount): self.layout.set(i, a, c) self._step += amt<|fim▁end|>
def step(self, amt=1): for a in range(0, 360, self.angle):
<|file_name|>application.js<|end_file_name|><|fim▁begin|>// This is a manifest file that'll be compiled into application.js, which will include all the files // listed below. // // Any JavaScript/Coffee file within this directory, lib/assets/javascripts, vendor/assets/javascripts,<|fim▁hole|>// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the // compiled file. // // Read Sprockets README (https://github.com/sstephenson/sprockets#sprockets-directives) for details // about supported directives. // //= require jquery //= require jquery-ui //= require jquery_ujs //= require underscore-min //= require lumen/loader //= require lumen/bootswatch //= require bootstrap3-typeahead //= require bootstrap-datepicker //= require pages //= require airports //= require wishlists //= require base //= require reverse_geocoder // =require current_location //= require mapper //= require map_bounds //= require map<|fim▁end|>
// or vendor/assets/javascripts of plugins, if any, can be referenced here using a relative path. //
<|file_name|>pad.rs<|end_file_name|><|fim▁begin|>use ffi::*; use caps::Caps; use reference::Reference; use object::Object; use std::ptr; use std::mem; use std::ops::{Deref, DerefMut}; pub struct Pad{ pad: Object } #[derive(Debug)] #[repr(isize)] pub enum LinkReturn{ WrongHierarchy = GST_PAD_LINK_WRONG_HIERARCHY as isize, WasLinked = GST_PAD_LINK_WAS_LINKED as isize, WrongDirection = GST_PAD_LINK_WRONG_DIRECTION as isize, NoFormat = GST_PAD_LINK_NOFORMAT as isize, NoSched = GST_PAD_LINK_NOSCHED as isize, Refused = GST_PAD_LINK_REFUSED as isize, } impl Pad{ pub unsafe fn new(pad: *mut GstPad) -> Option<Pad>{ Object::new(pad as *mut GstObject).map(|obj| Pad{ pad: obj }) } pub fn link(&mut self, sink: &mut Pad) -> Result<(), LinkReturn>{ unsafe{ let ret = gst_pad_link(self.gst_pad_mut(), sink.gst_pad_mut()); if ret == GST_PAD_LINK_OK{ Ok(()) }else{ Err(mem::transmute(ret as isize)) } } } pub fn is_linked(&self) -> bool{ unsafe{ let pad: &mut GstPad = mem::transmute(self.gst_pad()); pad.peer != ptr::null_mut() } } pub fn query_caps(&self, filter: Option<Caps>) -> Option<Caps>{ unsafe{ let caps = gst_pad_query_caps(self.gst_pad() as *mut GstPad, filter.map(|mut caps| caps.gst_caps_mut()).unwrap_or(ptr::null_mut())); Caps::new(caps) } } pub unsafe fn gst_pad(&self) -> *const GstPad{ self.pad.gst_object() as *const GstPad } pub unsafe fn gst_pad_mut(&mut self) -> *mut GstPad{ self.pad.gst_object_mut() as *mut GstPad } } impl ::Transfer<GstPad> for Pad{ unsafe fn transfer(self) -> *mut GstPad{ self.pad.transfer() as *mut GstPad } } impl Reference for Pad{ fn reference(&self) -> Pad{ Pad{ pad: self.pad.reference() } } } impl AsRef<Object> for Pad{ fn as_ref(&self) -> &Object{ &self.pad } } impl AsMut<Object> for Pad{ fn as_mut(&mut self) -> &mut Object{ &mut self.pad }<|fim▁hole|>} impl From<Pad> for Object{ fn from(b: Pad) -> Object{ b.pad } } impl Deref for Pad{ type Target = Object; fn deref(&self) -> &Object{ &self.pad } } impl DerefMut for Pad{ fn deref_mut(&mut self) -> &mut Object{ &mut self.pad } }<|fim▁end|>
<|file_name|>post.ts<|end_file_name|><|fim▁begin|>// Imports import { BaseRepository } from './base'; // Imports models import { Post } from './../../entities/post'; export class PostRepository extends BaseRepository { constructor(host: string, username: string, password: string) { super(host, username, password);<|fim▁hole|> await BaseRepository.models.Post.create({ author: post.author, authorImage: post.authorImage, body: post.body, category: post.category, description: post.description, image: post.image, key: post.key, linkedInShareCount: post.linkedInShareCount, publishedTimestamp: post.publishedTimestamp, title: post.title, }); return true; } public async find(key: string): Promise<Post> { const post: any = await BaseRepository.models.Post.find({ where: { key, }, }); if (!post) { return null; } return new Post(post.key, post.title, post.description, post.body, post.image, post.category, post.author, post.authorImage, post.publishedTimestamp, post.linkedInShareCount); } public async update(post: Post): Promise<boolean> { const existingPost: any = await BaseRepository.models.Post.find({ where: { key: post.key, }, }); if (!existingPost) { return false; } existingPost.author = post.author; existingPost.authorImage = post.authorImage; existingPost.body = post.body; existingPost.category = post.category; existingPost.image = post.image; existingPost.description = post.description; existingPost.linkedInShareCount = post.linkedInShareCount; existingPost.publishedTimestamp = post.publishedTimestamp; existingPost.title = post.title; await existingPost.save(); return true; } public async list(): Promise<Post[]> { const posts: any[] = await BaseRepository.models.Post.findAll({ order: [ ['publishedTimestamp', 'DESC'], ], }); return posts.map((x) => new Post(x.key, x.title, x.description, x.body, x.image, x.category, x.author, x.authorImage, x.publishedTimestamp, x.linkedInShareCount)); } }<|fim▁end|>
} public async insert(post: Post): Promise<boolean> {
<|file_name|>db_migrate.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#!hyphen-venv/bin/python import imp from migrate.versioning import api from app import db from config import SQLALCHEMY_DATABASE_URI from config import SQLALCHEMY_MIGRATE_REPO migration = SQLALCHEMY_MIGRATE_REPO + \ '/versions/%03d_migration.py' % \ (api.db_version( SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO) + 1) tmp_module = imp.new_module('old_model') old_model = api.create_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO) exec old_model in tmp_module.__dict__ script = api.make_update_script_for_model( SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, tmp_module.meta, db.metadata) open(migration, "wt").write(script) api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO) print 'New migration saved as ' + migration print 'Current database version: ' + \ str(api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO))<|fim▁end|>
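The filename arithmetic used above, in isolation: the next migration script is numbered with the current database version plus one, zero-padded to three digits.

def next_migration_name(current_db_version):
    return '%03d_migration.py' % (current_db_version + 1)

assert next_migration_name(0) == '001_migration.py'
assert next_migration_name(41) == '042_migration.py'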
<|file_name|>securitygroups_db.py<|end_file_name|><|fim▁begin|># Copyright 2012 VMware, Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import netaddr from neutron_lib.api.definitions import port as port_def from neutron_lib.api import validators from neutron_lib.callbacks import events from neutron_lib.callbacks import exceptions from neutron_lib.callbacks import registry from neutron_lib.callbacks import resources from neutron_lib import constants from neutron_lib import exceptions as n_exc from neutron_lib.utils import helpers from neutron_lib.utils import net from oslo_utils import uuidutils import six from sqlalchemy.orm import scoped_session from neutron._i18n import _ from neutron.common import constants as n_const from neutron.common import utils from neutron.db import _model_query as model_query from neutron.db import _resource_extend as resource_extend from neutron.db import _utils as db_utils from neutron.db import api as db_api from neutron.db.models import securitygroup as sg_models from neutron.extensions import securitygroup as ext_sg from neutron.objects import base as base_obj from neutron.objects import securitygroup as sg_obj @resource_extend.has_resource_extenders @registry.has_registry_receivers class SecurityGroupDbMixin(ext_sg.SecurityGroupPluginBase): """Mixin class to add security group to db_base_plugin_v2.""" __native_bulk_support = True def create_security_group_bulk(self, context, security_groups): return self._create_bulk('security_group', context, security_groups) def _registry_notify(self, res, event, id=None, exc_cls=None, **kwargs): # NOTE(armax): a callback exception here will prevent the request # from being processed. This is a hook point for backend's validation; # we raise to propagate the reason for the failure. try: registry.notify(res, event, self, **kwargs) except exceptions.CallbackFailure as e: if exc_cls: reason = (_('cannot perform %(event)s due to %(reason)s') % {'event': event, 'reason': e}) raise exc_cls(reason=reason, id=id) @db_api.retry_if_session_inactive() def create_security_group(self, context, security_group, default_sg=False): """Create security group. If default_sg is true that means we are a default security group for a given tenant if it does not exist. 
""" s = security_group['security_group'] kwargs = { 'context': context, 'security_group': s, 'is_default': default_sg, } self._registry_notify(resources.SECURITY_GROUP, events.BEFORE_CREATE, exc_cls=ext_sg.SecurityGroupConflict, **kwargs) tenant_id = s['tenant_id'] if not default_sg: self._ensure_default_security_group(context, tenant_id) else: existing_def_sg_id = self._get_default_sg_id(context, tenant_id) if existing_def_sg_id is not None: # default already exists, return it return self.get_security_group(context, existing_def_sg_id) with db_api.context_manager.writer.using(context): sg = sg_obj.SecurityGroup( context, id=s.get('id') or uuidutils.generate_uuid(), description=s['description'], project_id=tenant_id, name=s['name'], is_default=default_sg) sg.create() for ethertype in ext_sg.sg_supported_ethertypes: if default_sg: # Allow intercommunication ingress_rule = sg_obj.SecurityGroupRule( context, id=uuidutils.generate_uuid(), project_id=tenant_id, security_group_id=sg.id, direction='ingress', ethertype=ethertype, remote_group_id=sg.id) ingress_rule.create() sg.rules.append(ingress_rule) egress_rule = sg_obj.SecurityGroupRule( context, id=uuidutils.generate_uuid(), project_id=tenant_id, security_group_id=sg.id, direction='egress', ethertype=ethertype) egress_rule.create() sg.rules.append(egress_rule) sg.obj_reset_changes(['rules']) # fetch sg from db to load the sg rules with sg model. # NOTE(yamamoto): Adding rules above bumps the revision # of the SG. It would add SG object to the session. # Expunge it to ensure the following get_object doesn't # use the instance. context.session.expunge(model_query.get_by_id( context, sg_models.SecurityGroup, sg.id)) sg = sg_obj.SecurityGroup.get_object(context, id=sg.id) secgroup_dict = self._make_security_group_dict(sg) kwargs['security_group'] = secgroup_dict self._registry_notify(resources.SECURITY_GROUP, events.PRECOMMIT_CREATE, exc_cls=ext_sg.SecurityGroupConflict, **kwargs) registry.notify(resources.SECURITY_GROUP, events.AFTER_CREATE, self, **kwargs) return secgroup_dict @db_api.retry_if_session_inactive() def get_security_groups(self, context, filters=None, fields=None, sorts=None, limit=None, marker=None, page_reverse=False, default_sg=False): # If default_sg is True do not call _ensure_default_security_group() # so this can be done recursively. Context.tenant_id is checked # because all the unit tests do not explicitly set the context on # GETS. TODO(arosen) context handling can probably be improved here. filters = filters or {} if not default_sg and context.tenant_id: tenant_id = filters.get('tenant_id') if tenant_id: tenant_id = tenant_id[0] else: tenant_id = context.tenant_id self._ensure_default_security_group(context, tenant_id) pager = base_obj.Pager( sorts=sorts, limit=limit, marker=marker, page_reverse=page_reverse) sg_objs = sg_obj.SecurityGroup.get_objects( context, _pager=pager, validate_filters=False, **filters) return [self._make_security_group_dict(obj, fields) for obj in sg_objs] @db_api.retry_if_session_inactive() def get_security_groups_count(self, context, filters=None): filters = filters or {} return sg_obj.SecurityGroup.count( context, validate_filters=False, **filters) @db_api.retry_if_session_inactive() def get_security_group(self, context, id, fields=None, tenant_id=None): """Tenant id is given to handle the case when creating a security group rule on behalf of another use. 
""" if tenant_id: tmp_context_tenant_id = context.tenant_id context.tenant_id = tenant_id try: with db_api.context_manager.reader.using(context): ret = self._make_security_group_dict(self._get_security_group( context, id), fields) ret['security_group_rules'] = self.get_security_group_rules( context, {'security_group_id': [id]}) finally: if tenant_id: context.tenant_id = tmp_context_tenant_id return ret def _get_security_group(self, context, id): sg = sg_obj.SecurityGroup.get_object(context, id=id) if sg is None: raise ext_sg.SecurityGroupNotFound(id=id) return sg @db_api.retry_if_session_inactive() def delete_security_group(self, context, id): filters = {'security_group_id': [id]} with db_api.context_manager.reader.using(context): ports = self._get_port_security_group_bindings(context, filters) if ports: raise ext_sg.SecurityGroupInUse(id=id) # confirm security group exists sg = self._get_security_group(context, id) if sg['name'] == 'default' and not context.is_admin: raise ext_sg.SecurityGroupCannotRemoveDefault() kwargs = { 'context': context, 'security_group_id': id, 'security_group': sg, } self._registry_notify(resources.SECURITY_GROUP, events.BEFORE_DELETE, exc_cls=ext_sg.SecurityGroupInUse, id=id, **kwargs) with db_api.context_manager.writer.using(context): # pass security_group_rule_ids to ensure # consistency with deleted rules # get security_group_bindings and security_group one more time # so that they will be attached for session where sg will be # deleted ports = self._get_port_security_group_bindings(context, filters) sg = self._get_security_group(context, id) kwargs['security_group_rule_ids'] = [r['id'] for r in sg.rules] kwargs['security_group'] = self._make_security_group_dict(sg) self._registry_notify(resources.SECURITY_GROUP, events.PRECOMMIT_DELETE, exc_cls=ext_sg.SecurityGroupInUse, id=id, **kwargs) sg.delete() kwargs.pop('security_group') registry.notify(resources.SECURITY_GROUP, events.AFTER_DELETE, self, **kwargs) @db_api.retry_if_session_inactive() def update_security_group(self, context, id, security_group): s = security_group['security_group'] kwargs = { 'context': context, 'security_group_id': id, 'security_group': s, } self._registry_notify(resources.SECURITY_GROUP, events.BEFORE_UPDATE, exc_cls=ext_sg.SecurityGroupConflict, **kwargs) with db_api.context_manager.writer.using(context): sg = self._get_security_group(context, id) if sg.name == 'default' and 'name' in s: raise ext_sg.SecurityGroupCannotUpdateDefault() sg_dict = self._make_security_group_dict(sg) kwargs['original_security_group'] = sg_dict sg.update_fields(s) sg.update() sg_dict = self._make_security_group_dict(sg) kwargs['security_group'] = sg_dict self._registry_notify( resources.SECURITY_GROUP, events.PRECOMMIT_UPDATE, exc_cls=ext_sg.SecurityGroupConflict, **kwargs) registry.notify(resources.SECURITY_GROUP, events.AFTER_UPDATE, self, **kwargs) return sg_dict def _make_security_group_dict(self, security_group, fields=None): res = {'id': security_group['id'], 'name': security_group['name'], 'tenant_id': security_group['tenant_id'], 'description': security_group['description']} res['security_group_rules'] = [ self._make_security_group_rule_dict(r.db_obj) for r in security_group.rules ] resource_extend.apply_funcs(ext_sg.SECURITYGROUPS, res, security_group.db_obj) return db_utils.resource_fields(res, fields) @staticmethod def _make_security_group_binding_dict(security_group, fields=None): res = {'port_id': security_group['port_id'], 'security_group_id': security_group['security_group_id']} return 
db_utils.resource_fields(res, fields) @db_api.retry_if_session_inactive() def _create_port_security_group_binding(self, context, port_id, security_group_id): with db_api.context_manager.writer.using(context): db = sg_models.SecurityGroupPortBinding(port_id=port_id, security_group_id=security_group_id) context.session.add(db) def _get_port_security_group_bindings(self, context, filters=None, fields=None): return model_query.get_collection( context, sg_models.SecurityGroupPortBinding, self._make_security_group_binding_dict, filters=filters, fields=fields) @db_api.retry_if_session_inactive() def _delete_port_security_group_bindings(self, context, port_id): with db_api.context_manager.writer.using(context): query = model_query.query_with_hooks( context, sg_models.SecurityGroupPortBinding) bindings = query.filter( sg_models.SecurityGroupPortBinding.port_id == port_id) for binding in bindings: context.session.delete(binding) @db_api.retry_if_session_inactive() def create_security_group_rule_bulk(self, context, security_group_rules): return self._create_bulk('security_group_rule', context, security_group_rules) @db_api.retry_if_session_inactive() def create_security_group_rule_bulk_native(self, context, security_group_rules): rules = security_group_rules['security_group_rules'] scoped_session(context.session) security_group_id = self._validate_security_group_rules( context, security_group_rules) with db_api.context_manager.writer.using(context): if not self.get_security_group(context, security_group_id): raise ext_sg.SecurityGroupNotFound(id=security_group_id) self._check_for_duplicate_rules(context, rules) ret = [] for rule_dict in rules: res_rule_dict = self._create_security_group_rule( context, rule_dict, validate=False) ret.append(res_rule_dict) for rdict in ret: registry.notify( resources.SECURITY_GROUP_RULE, events.AFTER_CREATE, self, context=context, security_group_rule=rdict) return ret @db_api.retry_if_session_inactive() def create_security_group_rule(self, context, security_group_rule): res = self._create_security_group_rule(context, security_group_rule) registry.notify( resources.SECURITY_GROUP_RULE, events.AFTER_CREATE, self, context=context, security_group_rule=res) return res def _create_security_group_rule(self, context, security_group_rule, validate=True): if validate: self._validate_security_group_rule(context, security_group_rule) rule_dict = security_group_rule['security_group_rule'] remote_ip_prefix = rule_dict.get('remote_ip_prefix') if remote_ip_prefix: remote_ip_prefix = utils.AuthenticIPNetwork(remote_ip_prefix) protocol = rule_dict.get('protocol') if protocol: # object expects strings only protocol = six.text_type(protocol) args = { 'id': (rule_dict.get('id') or uuidutils.generate_uuid()), 'project_id': rule_dict['tenant_id'], 'security_group_id': rule_dict['security_group_id'], 'direction': rule_dict['direction'], 'remote_group_id': rule_dict.get('remote_group_id'), 'ethertype': rule_dict['ethertype'], 'protocol': protocol, 'remote_ip_prefix': remote_ip_prefix, 'description': rule_dict.get('description'), } port_range_min = self._safe_int(rule_dict['port_range_min']) if port_range_min is not None: args['port_range_min'] = port_range_min port_range_max = self._safe_int(rule_dict['port_range_max']) if port_range_max is not None: args['port_range_max'] = port_range_max kwargs = { 'context': context, 'security_group_rule': args } self._registry_notify(resources.SECURITY_GROUP_RULE, events.BEFORE_CREATE, exc_cls=ext_sg.SecurityGroupConflict, **kwargs) with 
db_api.context_manager.writer.using(context): if validate: self._check_for_duplicate_rules_in_db(context, security_group_rule) sg_rule = sg_obj.SecurityGroupRule(context, **args) sg_rule.create()<|fim▁hole|> # fetch sg_rule from db to load the sg rules with sg model # otherwise a DetachedInstanceError can occur for model extensions sg_rule = sg_obj.SecurityGroupRule.get_object(context, id=sg_rule.id) res_rule_dict = self._make_security_group_rule_dict(sg_rule.db_obj) kwargs['security_group_rule'] = res_rule_dict self._registry_notify(resources.SECURITY_GROUP_RULE, events.PRECOMMIT_CREATE, exc_cls=ext_sg.SecurityGroupConflict, **kwargs) return res_rule_dict def _get_ip_proto_number(self, protocol): if protocol is None: return # According to bug 1381379, protocol is always set to string to avoid # problems with comparing int and string in PostgreSQL. Here this # string is converted to int to give an opportunity to use it as # before. if protocol in n_const.IP_PROTOCOL_NAME_ALIASES: protocol = n_const.IP_PROTOCOL_NAME_ALIASES[protocol] return int(constants.IP_PROTOCOL_MAP.get(protocol, protocol)) def _get_ip_proto_name_and_num(self, protocol): if protocol is None: return protocol = str(protocol) if protocol in constants.IP_PROTOCOL_MAP: return [protocol, str(constants.IP_PROTOCOL_MAP.get(protocol))] elif protocol in n_const.IP_PROTOCOL_NUM_TO_NAME_MAP: return [n_const.IP_PROTOCOL_NUM_TO_NAME_MAP.get(protocol), protocol] return [protocol, protocol] def _safe_int(self, port_range): if port_range is None: return try: return int(port_range) except (ValueError, TypeError): msg = "port range must be an integer" raise n_exc.InvalidInput(error_message=msg) def _validate_port_range(self, rule): """Check that port_range is valid.""" if (rule['port_range_min'] is None and rule['port_range_max'] is None): return if not rule['protocol']: raise ext_sg.SecurityGroupProtocolRequiredWithPorts() ip_proto = self._get_ip_proto_number(rule['protocol']) # Not all firewall_driver support all these protocols, # but being strict here doesn't hurt. 
if ip_proto in [constants.PROTO_NUM_DCCP, constants.PROTO_NUM_SCTP, constants.PROTO_NUM_TCP, constants.PROTO_NUM_UDP, constants.PROTO_NUM_UDPLITE]: if rule['port_range_min'] == 0 or rule['port_range_max'] == 0: raise ext_sg.SecurityGroupInvalidPortValue(port=0) elif (rule['port_range_min'] is not None and rule['port_range_max'] is not None and rule['port_range_min'] <= rule['port_range_max']): pass else: raise ext_sg.SecurityGroupInvalidPortRange() elif ip_proto in [constants.PROTO_NUM_ICMP, constants.PROTO_NUM_IPV6_ICMP]: for attr, field in [('port_range_min', 'type'), ('port_range_max', 'code')]: if rule[attr] is not None and not (0 <= rule[attr] <= 255): raise ext_sg.SecurityGroupInvalidIcmpValue( field=field, attr=attr, value=rule[attr]) if (rule['port_range_min'] is None and rule['port_range_max'] is not None): raise ext_sg.SecurityGroupMissingIcmpType( value=rule['port_range_max']) def _validate_ethertype_and_protocol(self, rule): """Check if given ethertype and protocol are valid or not""" if rule['protocol'] in [constants.PROTO_NAME_IPV6_ENCAP, constants.PROTO_NAME_IPV6_FRAG, constants.PROTO_NAME_IPV6_ICMP, constants.PROTO_NAME_IPV6_ICMP_LEGACY, constants.PROTO_NAME_IPV6_NONXT, constants.PROTO_NAME_IPV6_OPTS, constants.PROTO_NAME_IPV6_ROUTE, str(constants.PROTO_NUM_IPV6_ENCAP), str(constants.PROTO_NUM_IPV6_FRAG), str(constants.PROTO_NUM_IPV6_ICMP), str(constants.PROTO_NUM_IPV6_NONXT), str(constants.PROTO_NUM_IPV6_OPTS), str(constants.PROTO_NUM_IPV6_ROUTE)]: if rule['ethertype'] == constants.IPv4: raise ext_sg.SecurityGroupEthertypeConflictWithProtocol( ethertype=rule['ethertype'], protocol=rule['protocol']) def _validate_single_tenant_and_group(self, security_group_rules): """Check that all rules belong to the same security group and tenant """ sg_groups = set() tenants = set() for rule_dict in security_group_rules['security_group_rules']: rule = rule_dict['security_group_rule'] sg_groups.add(rule['security_group_id']) if len(sg_groups) > 1: raise ext_sg.SecurityGroupNotSingleGroupRules() tenants.add(rule['tenant_id']) if len(tenants) > 1: raise ext_sg.SecurityGroupRulesNotSingleTenant() return sg_groups.pop() def _validate_security_group_rule(self, context, security_group_rule): rule = security_group_rule['security_group_rule'] self._validate_port_range(rule) self._validate_ip_prefix(rule) self._validate_ethertype_and_protocol(rule) if rule['remote_ip_prefix'] and rule['remote_group_id']: raise ext_sg.SecurityGroupRemoteGroupAndRemoteIpPrefix() remote_group_id = rule['remote_group_id'] # Check that remote_group_id exists for tenant if remote_group_id: self.get_security_group(context, remote_group_id, tenant_id=rule['tenant_id']) security_group_id = rule['security_group_id'] # Confirm that the tenant has permission # to add rules to this security group. 
self.get_security_group(context, security_group_id, tenant_id=rule['tenant_id']) return security_group_id def _validate_security_group_rules(self, context, security_group_rules): sg_id = self._validate_single_tenant_and_group(security_group_rules) for rule in security_group_rules['security_group_rules']: self._validate_security_group_rule(context, rule) return sg_id def _make_security_group_rule_dict(self, security_group_rule, fields=None): res = {'id': security_group_rule['id'], 'tenant_id': security_group_rule['tenant_id'], 'security_group_id': security_group_rule['security_group_id'], 'ethertype': security_group_rule['ethertype'], 'direction': security_group_rule['direction'], 'protocol': security_group_rule['protocol'], 'port_range_min': security_group_rule['port_range_min'], 'port_range_max': security_group_rule['port_range_max'], 'remote_ip_prefix': security_group_rule['remote_ip_prefix'], 'remote_group_id': security_group_rule['remote_group_id']} resource_extend.apply_funcs(ext_sg.SECURITYGROUPRULES, res, security_group_rule) return db_utils.resource_fields(res, fields) def _make_security_group_rule_filter_dict(self, security_group_rule): sgr = security_group_rule['security_group_rule'] res = {'tenant_id': [sgr['tenant_id']], 'security_group_id': [sgr['security_group_id']], 'direction': [sgr['direction']]} include_if_present = ['protocol', 'port_range_max', 'port_range_min', 'ethertype', 'remote_group_id'] for key in include_if_present: value = sgr.get(key) if value: res[key] = [value] # protocol field will get corresponding name and number value = sgr.get('protocol') if value: res['protocol'] = self._get_ip_proto_name_and_num(value) return res def _rules_equal(self, rule1, rule2): """Determines if two rules are equal ignoring id field.""" rule1_copy = rule1.copy() rule2_copy = rule2.copy() rule1_copy.pop('id', None) rule2_copy.pop('id', None) return rule1_copy == rule2_copy def _check_for_duplicate_rules(self, context, security_group_rules): for i in security_group_rules: found_self = False for j in security_group_rules: if self._rules_equal(i['security_group_rule'], j['security_group_rule']): if found_self: raise ext_sg.DuplicateSecurityGroupRuleInPost(rule=i) found_self = True self._check_for_duplicate_rules_in_db(context, i) def _check_for_duplicate_rules_in_db(self, context, security_group_rule): # Check in database if rule exists filters = self._make_security_group_rule_filter_dict( security_group_rule) rule_dict = security_group_rule['security_group_rule'].copy() rule_dict.pop('description', None) keys = rule_dict.keys() fields = list(keys) + ['id'] if 'remote_ip_prefix' not in fields: fields += ['remote_ip_prefix'] db_rules = self.get_security_group_rules(context, filters, fields=fields) # Note(arosen): the call to get_security_group_rules wildcards # values in the filter that have a value of [None]. For # example, filters = {'remote_group_id': [None]} will return # all security group rules regardless of their value of # remote_group_id. Therefore it is not possible to do this # query unless the behavior of _get_collection() # is changed which cannot be because other methods are already # relying on this behavior. Therefore, we do the filtering # below to check for these corner cases. 
rule_dict.pop('id', None) sg_protocol = rule_dict.pop('protocol', None) remote_ip_prefix = rule_dict.pop('remote_ip_prefix', None) for db_rule in db_rules: rule_id = db_rule.pop('id', None) # remove protocol and match separately for number and type db_protocol = db_rule.pop('protocol', None) is_protocol_matching = ( self._get_ip_proto_name_and_num(db_protocol) == self._get_ip_proto_name_and_num(sg_protocol)) db_remote_ip_prefix = db_rule.pop('remote_ip_prefix', None) duplicate_ip_prefix = self._validate_duplicate_ip_prefix( remote_ip_prefix, db_remote_ip_prefix) if (is_protocol_matching and duplicate_ip_prefix and rule_dict == db_rule): raise ext_sg.SecurityGroupRuleExists(rule_id=rule_id) def _validate_duplicate_ip_prefix(self, ip_prefix, other_ip_prefix): if other_ip_prefix is not None: other_ip_prefix = str(other_ip_prefix) all_address = ['0.0.0.0/0', '::/0', None] if ip_prefix == other_ip_prefix: return True elif ip_prefix in all_address and other_ip_prefix in all_address: return True return False def _validate_ip_prefix(self, rule): """Check that a valid cidr was specified as remote_ip_prefix No need to check that it is in fact an IP address as this is already validated by attribute validators. Check that rule ethertype is consistent with remote_ip_prefix ip type. Add mask to ip_prefix if absent (192.168.1.10 -> 192.168.1.10/32). """ input_prefix = rule['remote_ip_prefix'] if input_prefix: addr = netaddr.IPNetwork(input_prefix) # set input_prefix to always include the netmask: rule['remote_ip_prefix'] = str(addr) # check consistency of ethertype with addr version if rule['ethertype'] != "IPv%d" % (addr.version): raise ext_sg.SecurityGroupRuleParameterConflict( ethertype=rule['ethertype'], cidr=input_prefix) @db_api.retry_if_session_inactive() def get_security_group_rules(self, context, filters=None, fields=None, sorts=None, limit=None, marker=None, page_reverse=False): filters = filters or {} pager = base_obj.Pager( sorts=sorts, marker=marker, limit=limit, page_reverse=page_reverse) rule_objs = sg_obj.SecurityGroupRule.get_objects( context, _pager=pager, validate_filters=False, **filters ) return [ self._make_security_group_rule_dict(obj.db_obj, fields) for obj in rule_objs ] @db_api.retry_if_session_inactive() def get_security_group_rules_count(self, context, filters=None): filters = filters or {} return sg_obj.SecurityGroupRule.count( context, validate_filters=False, **filters) @db_api.retry_if_session_inactive() def get_security_group_rule(self, context, id, fields=None): security_group_rule = self._get_security_group_rule(context, id) return self._make_security_group_rule_dict( security_group_rule.db_obj, fields) def _get_security_group_rule(self, context, id): sgr = sg_obj.SecurityGroupRule.get_object(context, id=id) if sgr is None: raise ext_sg.SecurityGroupRuleNotFound(id=id) return sgr @db_api.retry_if_session_inactive() def delete_security_group_rule(self, context, id): kwargs = { 'context': context, 'security_group_rule_id': id } self._registry_notify(resources.SECURITY_GROUP_RULE, events.BEFORE_DELETE, id=id, exc_cls=ext_sg.SecurityGroupRuleInUse, **kwargs) with db_api.context_manager.writer.using(context): sgr = self._get_security_group_rule(context, id) kwargs['security_group_id'] = sgr['security_group_id'] self._registry_notify(resources.SECURITY_GROUP_RULE, events.PRECOMMIT_DELETE, exc_cls=ext_sg.SecurityGroupRuleInUse, id=id, **kwargs) sgr.delete() registry.notify( resources.SECURITY_GROUP_RULE, events.AFTER_DELETE, self, **kwargs) @staticmethod 
@resource_extend.extends([port_def.COLLECTION_NAME]) def _extend_port_dict_security_group(port_res, port_db): # Security group bindings will be retrieved from the SQLAlchemy # model. As they're loaded eagerly with ports because of the # joined load they will not cause an extra query. security_group_ids = [sec_group_mapping['security_group_id'] for sec_group_mapping in port_db.security_groups] port_res[ext_sg.SECURITYGROUPS] = security_group_ids return port_res def _process_port_create_security_group(self, context, port, security_group_ids): if validators.is_attr_set(security_group_ids): for security_group_id in security_group_ids: self._create_port_security_group_binding(context, port['id'], security_group_id) # Convert to list as a set might be passed here and # this has to be serialized port[ext_sg.SECURITYGROUPS] = (security_group_ids and list(security_group_ids) or []) def _get_default_sg_id(self, context, tenant_id): default_group = sg_obj.DefaultSecurityGroup.get_object( context, project_id=tenant_id, ) if default_group: return default_group.security_group_id @registry.receives(resources.PORT, [events.BEFORE_CREATE, events.BEFORE_UPDATE]) @registry.receives(resources.NETWORK, [events.BEFORE_CREATE]) def _ensure_default_security_group_handler(self, resource, event, trigger, context, **kwargs): if event == events.BEFORE_UPDATE: tenant_id = kwargs['original_' + resource]['tenant_id'] else: tenant_id = kwargs[resource]['tenant_id'] self._ensure_default_security_group(context, tenant_id) def _ensure_default_security_group(self, context, tenant_id): """Create a default security group if one doesn't exist. :returns: the default security group id for given tenant. """ default_group_id = self._get_default_sg_id(context, tenant_id) if default_group_id: return default_group_id security_group = { 'security_group': {'name': 'default', 'tenant_id': tenant_id, 'description': _('Default security group')} } return self.create_security_group(context, security_group, default_sg=True)['id'] def _get_security_groups_on_port(self, context, port): """Check that all security groups on port belong to tenant. :returns: all security groups IDs on port belonging to tenant. 
""" port = port['port'] if not validators.is_attr_set(port.get(ext_sg.SECURITYGROUPS)): return if port.get('device_owner') and net.is_port_trusted(port): return port_sg = port.get(ext_sg.SECURITYGROUPS, []) filters = {'id': port_sg} tenant_id = port.get('tenant_id') if tenant_id: filters['tenant_id'] = [tenant_id] valid_groups = set(g['id'] for g in self.get_security_groups(context, fields=['id'], filters=filters)) requested_groups = set(port_sg) port_sg_missing = requested_groups - valid_groups if port_sg_missing: raise ext_sg.SecurityGroupNotFound(id=', '.join(port_sg_missing)) return list(requested_groups) def _ensure_default_security_group_on_port(self, context, port): # we don't apply security groups for dhcp, router port = port['port'] if port.get('device_owner') and net.is_port_trusted(port): return if not validators.is_attr_set(port.get(ext_sg.SECURITYGROUPS)): default_sg = self._ensure_default_security_group(context, port['tenant_id']) port[ext_sg.SECURITYGROUPS] = [default_sg] def _check_update_deletes_security_groups(self, port): """Return True if port has as a security group and it's value is either [] or not is_attr_set, otherwise return False """ if (ext_sg.SECURITYGROUPS in port['port'] and not (validators.is_attr_set(port['port'][ext_sg.SECURITYGROUPS]) and port['port'][ext_sg.SECURITYGROUPS] != [])): return True return False def _check_update_has_security_groups(self, port): """Return True if port has security_groups attribute set and its not empty, or False otherwise. This method is called both for port create and port update. """ if (ext_sg.SECURITYGROUPS in port['port'] and (validators.is_attr_set(port['port'][ext_sg.SECURITYGROUPS]) and port['port'][ext_sg.SECURITYGROUPS] != [])): return True return False def update_security_group_on_port(self, context, id, port, original_port, updated_port): """Update security groups on port. This method returns a flag which indicates request notification is required and does not perform notification itself. It is because another changes for the port may require notification. """ need_notify = False port_updates = port['port'] if (ext_sg.SECURITYGROUPS in port_updates and not helpers.compare_elements( original_port.get(ext_sg.SECURITYGROUPS), port_updates[ext_sg.SECURITYGROUPS])): # delete the port binding and read it with the new rules port_updates[ext_sg.SECURITYGROUPS] = ( self._get_security_groups_on_port(context, port)) self._delete_port_security_group_bindings(context, id) self._process_port_create_security_group( context, updated_port, port_updates[ext_sg.SECURITYGROUPS]) need_notify = True else: updated_port[ext_sg.SECURITYGROUPS] = ( original_port[ext_sg.SECURITYGROUPS]) return need_notify<|fim▁end|>
<|file_name|>struct_defs.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // This test case tests the incremental compilation hash (ICH) implementation // for struct definitions. // The general pattern followed here is: Change one thing between rev1 and rev2 // and make sure that the hash has changed, then change nothing between rev2 and // rev3 and make sure that the hash has not changed. // We also test the ICH for struct definitions exported in metadata. Same as // above, we want to make sure that the change between rev1 and rev2 also // results in a change of the ICH for the struct's metadata, and that it stays // the same between rev2 and rev3. // compile-pass // revisions: cfail1 cfail2 cfail3 // compile-flags: -Z query-dep-graph -Zincremental-ignore-spans #![allow(warnings)] #![feature(rustc_attrs)] #![crate_type="rlib"] // Layout ---------------------------------------------------------------------- #[cfg(cfail1)] pub struct LayoutPacked; #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] #[repr(packed)] pub struct LayoutPacked; #[cfg(cfail1)] struct LayoutC; #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] #[repr(C)] struct LayoutC; // Tuple Struct Change Field Type ---------------------------------------------- #[cfg(cfail1)] struct TupleStructFieldType(i32); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] // Note that changing the type of a field does not change the type of the struct or enum, but // adding/removing fields or changing a fields name or visibility does. 
struct TupleStructFieldType( u32 ); // Tuple Struct Add Field ------------------------------------------------------ #[cfg(cfail1)] struct TupleStructAddField(i32); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct TupleStructAddField( i32, u32 ); // Tuple Struct Field Visibility ----------------------------------------------- #[cfg(cfail1)] struct TupleStructFieldVisibility(char); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct TupleStructFieldVisibility(pub char); // Record Struct Field Type ---------------------------------------------------- #[cfg(cfail1)] struct RecordStructFieldType { x: f32 } #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] // Note that changing the type of a field does not change the type of the struct or enum, but // adding/removing fields or changing a fields name or visibility does. 
struct RecordStructFieldType { x: u64 } // Record Struct Field Name ---------------------------------------------------- #[cfg(cfail1)] struct RecordStructFieldName { x: f32 } #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct RecordStructFieldName { y: f32 } // Record Struct Add Field ----------------------------------------------------- #[cfg(cfail1)] struct RecordStructAddField { x: f32 } #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct RecordStructAddField { x: f32, y: () } // Record Struct Field Visibility ---------------------------------------------- #[cfg(cfail1)] struct RecordStructFieldVisibility { x: f32 } #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct RecordStructFieldVisibility { pub x: f32 } // Add Lifetime Parameter ------------------------------------------------------ #[cfg(cfail1)] struct AddLifetimeParameter<'a>(&'a f32, &'a f64); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_dirty(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct AddLifetimeParameter<'a, 'b>(&'a f32, &'b f64); // Add Lifetime Parameter Bound ------------------------------------------------ #[cfg(cfail1)] struct AddLifetimeParameterBound<'a, 'b>(&'a f32, &'b f64); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_dirty(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct AddLifetimeParameterBound<'a, 'b: 'a>( &'a f32, 
&'b f64 ); #[cfg(cfail1)] struct AddLifetimeParameterBoundWhereClause<'a, 'b>(&'a f32, &'b f64); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_dirty(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct AddLifetimeParameterBoundWhereClause<'a, 'b>( &'a f32, &'b f64) where 'b: 'a; // Add Type Parameter ---------------------------------------------------------- #[cfg(cfail1)] struct AddTypeParameter<T1>(T1, T1); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_dirty(label="TypeOfItem", cfg="cfail2")] #[rustc_dirty(label="GenericsOfItem", cfg="cfail2")] #[rustc_dirty(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct AddTypeParameter<T1, T2>( // The field contains the parent's Generics, so it's dirty even though its // type hasn't changed. T1, T2 ); // Add Type Parameter Bound ---------------------------------------------------- #[cfg(cfail1)] struct AddTypeParameterBound<T>(T); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_dirty(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct AddTypeParameterBound<T: Send>( T ); #[cfg(cfail1)] struct AddTypeParameterBoundWhereClause<T>(T); #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_dirty(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct AddTypeParameterBoundWhereClause<T>( T ) where T: Sync; // Empty struct ---------------------------------------------------------------- // Since we cannot change anything in this case, we just make sure that the // fingerprint is stable (i.e., that there are no random influences like memory // addresses taken into account by the hashing algorithm). 
// Note: there is no #[cfg(...)], so this is ALWAYS compiled<|fim▁hole|>#[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] pub struct EmptyStruct; // Visibility ------------------------------------------------------------------ #[cfg(cfail1)] struct Visibility; #[cfg(not(cfail1))] #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] pub struct Visibility; struct ReferencedType1; struct ReferencedType2; // Tuple Struct Change Field Type Indirectly ----------------------------------- mod tuple_struct_change_field_type_indirectly { #[cfg(cfail1)] use super::ReferencedType1 as FieldType; #[cfg(not(cfail1))] use super::ReferencedType2 as FieldType; #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct TupleStruct( FieldType ); } // Record Struct Change Field Type Indirectly ----------------------------------- mod record_struct_change_field_type_indirectly { #[cfg(cfail1)] use super::ReferencedType1 as FieldType; #[cfg(not(cfail1))] use super::ReferencedType2 as FieldType; #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct RecordStruct { _x: FieldType } } trait ReferencedTrait1 {} trait ReferencedTrait2 {} // Change Trait Bound Indirectly ----------------------------------------------- mod change_trait_bound_indirectly { #[cfg(cfail1)] use super::ReferencedTrait1 as Trait; #[cfg(not(cfail1))] use super::ReferencedTrait2 as Trait; #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_dirty(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct Struct<T: Trait>(T); } // Change Trait Bound Indirectly In Where Clause 
------------------------------- mod change_trait_bound_indirectly_in_where_clause { #[cfg(cfail1)] use super::ReferencedTrait1 as Trait; #[cfg(not(cfail1))] use super::ReferencedTrait2 as Trait; #[rustc_dirty(label="Hir", cfg="cfail2")] #[rustc_dirty(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")] #[rustc_clean(label="GenericsOfItem", cfg="cfail2")] #[rustc_dirty(label="PredicatesOfItem", cfg="cfail2")] #[rustc_clean(label="Hir", cfg="cfail3")] #[rustc_clean(label="HirBody", cfg="cfail3")] #[rustc_clean(label="TypeOfItem", cfg="cfail3")] #[rustc_clean(label="GenericsOfItem", cfg="cfail3")] #[rustc_clean(label="PredicatesOfItem", cfg="cfail3")] struct Struct<T>(T) where T : Trait; }<|fim▁end|>
#[rustc_clean(label="Hir", cfg="cfail2")] #[rustc_clean(label="HirBody", cfg="cfail2")] #[rustc_clean(label="TypeOfItem", cfg="cfail2")]
<|file_name|>pfsense-updateCRL.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import sys from pfsense_api import PfSenseAPI from datetime import datetime from pfsense_cmdline import PfSenseOptionParser from ConfigParser import ConfigParser from pfsense_logger import PfSenseLogger as logging import os.path parser = PfSenseOptionParser() parser.add_option("--id", dest="crl_id", help="ID of the CRL to update") parser.add_option("--name", dest="name", help="Descriptive name of the CRL", default="Imported CRL") parser.add_option("--crl", dest="crl", help="File containing CRL in PEM format", metavar="CRL_FILE") parser.add_option("--ssl_verification", dest="ssl_verification", help="Whether SSL should be verified or not, valid values are yes/no, true/false, 1/0", default=True, metavar="yes/no") parser.add_option("--overwrite", dest="overwrite", default=False, help="Command line options will overwrite same settings in config file", action="store_true") (options, args) = parser.parse_args() logger = logging.setupLogger(options.logging) parser.check_cmd_options( options ) required_items = ['crl_id', 'crl', 'host', 'username', 'password'] options_cmdline = vars(options).copy() del options_cmdline['config'] del options_cmdline['overwrite']<|fim▁hole|>configFile.read(options.config) api = PfSenseAPI() for section in configFile.sections(): logger.info("Working on %s" % section) parsed_options = parser.parse_individual_options(configFile.items(section), options_cmdline, overwrite = options.overwrite, bool_keys = ['ssl_verification']) required_items_missed = False missed_items = parser.check_required_options(parsed_options, required_items) for item in missed_items: logger.error('%s is reqired for entry %s' % ( item, section)) required_items_missed = True if required_items_missed: continue if not os.path.isfile(parsed_options['crl']): logger.error('CRL file %s does not exist?' % parsed_options['crl']) continue try: crlFile = open(parsed_options['crl'], 'r') crlData = crlFile.read() crlFile.close() except: logger.error("Error while read CRL data from file %s" % parsed_options['crl']) continue api['options'] = parsed_options api.login() (rc, data, contentType) = api.call( '/system_crlmanager.php', 'POST', apiData = { 'method': 'existing', 'descr': '%s (last refresh: %s)' % (options.name, datetime.now().isoformat()), 'crltext': crlData, 'submit': 'Save' }, itemData = { 'id': parsed_options['crl_id'], 'act': 'editimported' }) api.logout() if rc == 302: logger.info('CRL Update successful for %s' % (section)) else: logger.info('CRL Update failed for %s' % ( section))<|fim▁end|>
configFile = ConfigParser()
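
A standalone sketch of the per-section ConfigParser flow the script above relies on; the file name and its contents are invented, and Python 2's ConfigParser module is assumed, matching the import above:

from ConfigParser import ConfigParser

config = ConfigParser()
config.read('pfsense.ini')  # hypothetical config with one section per CRL
for section in config.sections():
    # items() yields the (key, value) pairs that parse_individual_options
    # merges with the command-line options
    print('%s -> %r' % (section, dict(config.items(section))))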
<|file_name|>export_all_graphml.py<|end_file_name|><|fim▁begin|># Albert Cardona 2014-11-20
# This file is meant to be run from within ./manage.py shell in the environment, like:
# [1] load export_all_graphml.py
# [2] project_id = 12
# [2] export(project_id, "all.graphml")
#
# Will generate a gzip'ed file like "all.graphml.gz"
#
# Includes all skeletons with more than 1 treenode;
# each skeleton is an undirected graph, where each treenode is a node
# (with the skeleton ID and the location as extra attributes)
# and each relationship between child and parent treenodes is an undirected edge
# that has the skeleton ID as an extra attribute.
# Each presynaptic+postsynaptic connection is a directed edge between treenodes;
# these directed edges also contain the skeleton ID of the pre- and the postsynaptic
# skeletons.

from __future__ import with_statement
from django.db import connection
from django.db import transaction
import gzip
import sys

def writeOneSkeleton(file, cursor, skid):
    cursor.execute('''
    select id, parent_id, location_x, location_y, location_z
    from treenode
    where skeleton_id=%s
    ''' % skid)
    for row in cursor.fetchall():
        file.write('''<node id="n%s">
<data key="skid">%s</data>
<data key="x">%s</data>
<data key="y">%s</data>
<data key="z">%s</data>
</node>\n''' % (row[0], skid, row[2], row[3], row[4]))
        if row[1]:
            file.write('<edge id="e%s" directed="false" source="n%s" target="n%s" />\n' % (row[0], row[0], row[1]))

@transaction.atomic
def export(project_id, filename):
    project_id = int(project_id)
    cursor = connection.cursor()
    with gzip.open(filename + '.gz', 'w') as file:
        file.write('''<?xml version="1.0" encoding="UTF-8"?>
<graphml xmlns="http://graphml.graphdrawing.org/xmlns"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns
                             http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">
  <key id="skid" for="node" attr.name="skeleton id" attr.type="long"/>
  <key id="x" for="node" attr.name="x" attr.type="float"/>
  <key id="y" for="node" attr.name="y" attr.type="float"/>
  <key id="z" for="node" attr.name="z" attr.type="float"/>
  <key id="pre_skid" for="edge" attr.name="presynaptic skeleton id" attr.type="long"/>
  <key id="post_skid" for="edge" attr.name="postsynaptic skeleton id" attr.type="long"/>
  <graph id="CNS">\n''')
        #
        cursor.execute('''
        select skeleton_id from treenode
        where project_id=%s
        group by skeleton_id
        having count(*) > 1
        ''' % project_id)
        #
        for row in cursor.fetchall():
            print("Writing skeleton nodes for %s" % row[0])
            writeOneSkeleton(file, cursor, row[0])
        #
        cursor.execute('''
        select relation_name, id from relation where project_id=%s
        ''' % project_id)
        relations = dict(cursor.fetchall())
        #
        cursor.execute('''
        select tc2.id, tc1.treenode_id, tc2.treenode_id, tc1.skeleton_id, tc2.skeleton_id
        from treenode_connector tc1,
             treenode_connector tc2
        where tc1.project_id=%s
          and tc1.relation_id = %s
          and tc2.relation_id = %s
          and tc1.connector_id = tc2.connector_id
          and tc1.skeleton_id IN (select skeleton_id from treenode where project_id=%s group by skeleton_id having count(*) > 1)
        ''' % (project_id, relations['presynaptic_to'], relations['postsynaptic_to'], project_id))
        #
        print("Writing synapses")
        for row in cursor.fetchall():
            file.write('<edge id="e%s" directed="true" source="n%s" target="n%s">\n<data key="pre_skid">%s</data>\n<data key="post_skid">%s</data>\n</edge>\n' % row)
        #
        file.write("</graph>\n</graphml>")

def run():
    if len(sys.argv) < 3:
        print("Need 2 arguments: <project id> <filename.gml>")
    else:<|fim▁hole|>
        project_id = 
int(sys.argv[1])
        filename = sys.argv[2]
        export(project_id, filename)<|fim▁end|>
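
A small self-contained check — assuming Python 3's text-mode gzip rather than the script's Python 2 idioms, with invented node/edge ids — that the gzip + GraphML framing written by export() stays well-formed:

import gzip
import xml.etree.ElementTree as ET

with gzip.open('tiny.graphml.gz', 'wt') as f:
    f.write('<?xml version="1.0" encoding="UTF-8"?>\n'
            '<graphml xmlns="http://graphml.graphdrawing.org/xmlns">\n'
            '<graph id="CNS">\n'
            '<node id="n1"/>\n'
            '<node id="n2"/>\n'
            '<edge id="e1" directed="false" source="n1" target="n2"/>\n'
            '</graph>\n</graphml>')

with gzip.open('tiny.graphml.gz', 'rt') as f:
    ET.fromstring(f.read())  # raises ParseError if the framing is broken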
<|file_name|>joiner.rs<|end_file_name|><|fim▁begin|>// Exercise 2.3 // I were better to be eaten to death with a rust than to be scoured to nothing with perpetual motion. use std::os; use std::io::File; fn xor(a: &[u8], b: &[u8]) -> ~[u8] { let mut ret = ~[]; for i in range(0, a.len()) { ret.push(a[i] ^ b[i]); } ret } fn main() { let args: ~[~str] = os::args(); if args.len() != 3 { println!("Usage: {:s} <inputfile1> <inputfile2>", args[0]); } else {<|fim▁hole|> let fname1 = &args[1]; let fname2 = &args[2]; let path1 = Path::new(fname1.clone()); let path2 = Path::new(fname2.clone()); let share_file1 = File::open(&path1); let share_file2 = File::open(&path2); match (share_file1, share_file2) { (Some(mut share1), Some(mut share2)) => { let share1bytes: ~[u8] = share1.read_to_end(); let share2bytes: ~[u8] = share2.read_to_end(); print!("{:s}", std::str::from_utf8_owned( xor(share1bytes, share2bytes))); } , (_, _) => fail!("Error opening input files!") } } }<|fim▁end|>
<|file_name|>styles.js<|end_file_name|><|fim▁begin|>import { StyleSheet } from "react-native"; const styles = StyleSheet.create({ container: { flex: 1, justifyContent: "flex-start", alignItems: "center", backgroundColor: "#669999" }, buttons: { // flex: 0.15, flexDirection: "row", alignItems: "center", marginVertical: 20 }, button: { marginHorizontal: 20, padding: 20, backgroundColor: "#0D4D4D", color: "white", textAlign: "center" }, selectedButton: { backgroundColor: "#006699" },<|fim▁hole|> body: { // flex: 0.8, justifyContent: "flex-start", alignItems: "center" }, subTitle: { marginVertical: 10 }, viewport: { // flex: 1, alignSelf: "center", backgroundColor: "white" } }); export default styles;<|fim▁end|>
<|file_name|>Patches.cpp<|end_file_name|><|fim▁begin|>// This is an open source non-commercial project. Dear PVS-Studio, please check it. // PVS-Studio Static Code Analyzer for C, C++ and C#: http://www.viva64.com // ****************************************************************** // ****************************************************************** // * // * This file is part of the Cxbx project. // * // * Cxbx and Cxbe are free software; you can redistribute them // * and/or modify them under the terms of the GNU General Public // * License as published by the Free Software Foundation; either // * version 2 of the license, or (at your option) any later version. // * // * This program is distributed in the hope that it will be useful, // * but WITHOUT ANY WARRANTY; without even the implied warranty of // * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // * GNU General Public License for more details. // * // * You should have recieved a copy of the GNU General Public License // * along with this program; see the file COPYING. // * If not, write to the Free Software Foundation, Inc., // * 59 Temple Place - Suite 330, Bostom, MA 02111-1307, USA. // * // * (c) 2018 Luke Usher <[email protected]> // * // * All rights reserved // * // ****************************************************************** #include "core\kernel\init\CxbxKrnl.h" #include "core\kernel\support\Emu.h" #include "core\hle\D3D8\Direct3D9/Direct3D9.h" #include "core\hle\DSOUND\DirectSound\DirectSound.hpp" #include "Patches.hpp" #include "Intercept.hpp" #include <map> #include <unordered_map> #include <subhook.h> typedef struct { const void* patchFunc; // Function pointer of the patch in Cxbx-R codebase const uint32_t flags; // Patch Flags } xbox_patch_t; const uint32_t PATCH_ALWAYS = 1 << 0; const uint32_t PATCH_HLE_D3D = 1 << 1; const uint32_t PATCH_HLE_DSOUND = 1 << 2; const uint32_t PATCH_HLE_OHCI = 1 << 3; const uint32_t PATCH_IS_FIBER = 1 << 4; #define PATCH_ENTRY(Name, Func, Flags) \ { Name, xbox_patch_t { (void *)&Func, Flags} } // Map of Xbox Patch names to Emulator Patches // A std::string is used as it's possible for a symbol to have multiple names // This allows for the eventual importing of Dxbx symbol files and even IDA signatures too! 
std::map<const std::string, const xbox_patch_t> g_PatchTable = { // Direct3D PATCH_ENTRY("CDevice_SetStateUP", xbox::EMUPATCH(CDevice_SetStateUP), PATCH_HLE_D3D), PATCH_ENTRY("CDevice_SetStateVB", xbox::EMUPATCH(CDevice_SetStateVB), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_Begin", xbox::EMUPATCH(D3DDevice_Begin), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_BeginPush", xbox::EMUPATCH(D3DDevice_BeginPush), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_BeginPush2", xbox::EMUPATCH(D3DDevice_BeginPush2), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_BeginVisibilityTest", xbox::EMUPATCH(D3DDevice_BeginVisibilityTest), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_BlockOnFence", xbox::EMUPATCH(D3DDevice_BlockOnFence), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_BlockUntilVerticalBlank", xbox::EMUPATCH(D3DDevice_BlockUntilVerticalBlank), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_Clear", xbox::EMUPATCH(D3DDevice_Clear), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_CopyRects", xbox::EMUPATCH(D3DDevice_CopyRects), PATCH_HLE_D3D), // PATCH_ENTRY("D3DDevice_CreateVertexShader", xbox::EMUPATCH(D3DDevice_CreateVertexShader), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DeleteVertexShader", xbox::EMUPATCH(D3DDevice_DeleteVertexShader), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DeleteVertexShader_0", xbox::EMUPATCH(D3DDevice_DeleteVertexShader_0), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DrawIndexedVertices", xbox::EMUPATCH(D3DDevice_DrawIndexedVertices), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DrawIndexedVerticesUP", xbox::EMUPATCH(D3DDevice_DrawIndexedVerticesUP), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DrawRectPatch", xbox::EMUPATCH(D3DDevice_DrawRectPatch), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DrawTriPatch", xbox::EMUPATCH(D3DDevice_DrawTriPatch), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DrawVertices", xbox::EMUPATCH(D3DDevice_DrawVertices), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DrawVertices_4", xbox::EMUPATCH(D3DDevice_DrawVertices_4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DrawVerticesUP", xbox::EMUPATCH(D3DDevice_DrawVerticesUP), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_DrawVerticesUP_12", xbox::EMUPATCH(D3DDevice_DrawVerticesUP_12), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_EnableOverlay", xbox::EMUPATCH(D3DDevice_EnableOverlay), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_End", xbox::EMUPATCH(D3DDevice_End), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_EndPush", xbox::EMUPATCH(D3DDevice_EndPush), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_EndVisibilityTest", xbox::EMUPATCH(D3DDevice_EndVisibilityTest), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_EndVisibilityTest_0", xbox::EMUPATCH(D3DDevice_EndVisibilityTest_0), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_FlushVertexCache", xbox::EMUPATCH(D3DDevice_FlushVertexCache), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetBackBuffer", xbox::EMUPATCH(D3DDevice_GetBackBuffer), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetBackBuffer2", xbox::EMUPATCH(D3DDevice_GetBackBuffer2), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetBackBuffer2_0__LTCG_eax1", xbox::EMUPATCH(D3DDevice_GetBackBuffer2_0__LTCG_eax1), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetDisplayFieldStatus", xbox::EMUPATCH(D3DDevice_GetDisplayFieldStatus), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetGammaRamp", xbox::EMUPATCH(D3DDevice_GetGammaRamp), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetMaterial", xbox::EMUPATCH(D3DDevice_GetMaterial), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetModelView", xbox::EMUPATCH(D3DDevice_GetModelView), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetOverlayUpdateStatus", xbox::EMUPATCH(D3DDevice_GetOverlayUpdateStatus), PATCH_HLE_D3D), 
PATCH_ENTRY("D3DDevice_GetProjectionViewportMatrix", xbox::EMUPATCH(D3DDevice_GetProjectionViewportMatrix), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetShaderConstantMode", xbox::EMUPATCH(D3DDevice_GetShaderConstantMode), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_GetTransform", xbox::EMUPATCH(D3DDevice_GetTransform), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetVertexShader", xbox::EMUPATCH(D3DDevice_GetVertexShader), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetVertexShaderConstant", xbox::EMUPATCH(D3DDevice_GetVertexShaderConstant), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_GetVertexShaderDeclaration", xbox::EMUPATCH(D3DDevice_GetVertexShaderDeclaration), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_GetVertexShaderFunction", xbox::EMUPATCH(D3DDevice_GetVertexShaderFunction), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_GetVertexShaderInput", xbox::EMUPATCH(D3DDevice_GetVertexShaderInput), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_GetVertexShaderSize", xbox::EMUPATCH(D3DDevice_GetVertexShaderSize), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_GetVertexShaderType", xbox::EMUPATCH(D3DDevice_GetVertexShaderType), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_GetViewportOffsetAndScale", xbox::EMUPATCH(D3DDevice_GetViewportOffsetAndScale), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_GetVisibilityTestResult", xbox::EMUPATCH(D3DDevice_GetVisibilityTestResult), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_InsertCallback", xbox::EMUPATCH(D3DDevice_InsertCallback), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_InsertFence", xbox::EMUPATCH(D3DDevice_InsertFence), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_IsBusy", xbox::EMUPATCH(D3DDevice_IsBusy), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_IsFencePending", xbox::EMUPATCH(D3DDevice_IsFencePending), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_LightEnable", xbox::EMUPATCH(D3DDevice_LightEnable), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_LoadVertexShader", xbox::EMUPATCH(D3DDevice_LoadVertexShader), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_LoadVertexShaderProgram", xbox::EMUPATCH(D3DDevice_LoadVertexShaderProgram), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_LoadVertexShader_0__LTCG_eax_Address_ecx_Handle", xbox::EMUPATCH(D3DDevice_LoadVertexShader_0__LTCG_eax_Address_ecx_Handle), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_LoadVertexShader_0__LTCG_eax_Address_edx_Handle", xbox::EMUPATCH(D3DDevice_LoadVertexShader_0__LTCG_eax_Address_edx_Handle), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_LoadVertexShader_4", xbox::EMUPATCH(D3DDevice_LoadVertexShader_4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_MultiplyTransform", xbox::EMUPATCH(D3DDevice_MultiplyTransform), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_PersistDisplay", xbox::EMUPATCH(D3DDevice_PersistDisplay), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_Present", xbox::EMUPATCH(D3DDevice_Present), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_PrimeVertexCache", xbox::EMUPATCH(D3DDevice_PrimeVertexCache), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_Reset", xbox::EMUPATCH(D3DDevice_Reset), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_RunPushBuffer", xbox::EMUPATCH(D3DDevice_RunPushBuffer), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_RunVertexStateShader", xbox::EMUPATCH(D3DDevice_RunVertexStateShader), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SelectVertexShader", xbox::EMUPATCH(D3DDevice_SelectVertexShader), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_SelectVertexShaderDirect", xbox::EMUPATCH(D3DDevice_SelectVertexShaderDirect), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SelectVertexShader_0", xbox::EMUPATCH(D3DDevice_SelectVertexShader_0), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SelectVertexShader_4", 
xbox::EMUPATCH(D3DDevice_SelectVertexShader_4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetBackBufferScale", xbox::EMUPATCH(D3DDevice_SetBackBufferScale), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetDepthClipPlanes", xbox::EMUPATCH(D3DDevice_SetDepthClipPlanes), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetFlickerFilter", xbox::EMUPATCH(D3DDevice_SetFlickerFilter), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetFlickerFilter_0", xbox::EMUPATCH(D3DDevice_SetFlickerFilter_0), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetGammaRamp", xbox::EMUPATCH(D3DDevice_SetGammaRamp), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetIndices", xbox::EMUPATCH(D3DDevice_SetIndices), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetIndices_4", xbox::EMUPATCH(D3DDevice_SetIndices_4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetLight", xbox::EMUPATCH(D3DDevice_SetLight), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetMaterial", xbox::EMUPATCH(D3DDevice_SetMaterial), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetModelView", xbox::EMUPATCH(D3DDevice_SetModelView), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetPalette", xbox::EMUPATCH(D3DDevice_SetPalette), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetPalette_4", xbox::EMUPATCH(D3DDevice_SetPalette_4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetPixelShader", xbox::EMUPATCH(D3DDevice_SetPixelShader), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_SetPixelShaderConstant_4", xbox::EMUPATCH(D3DDevice_SetPixelShaderConstant_4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetPixelShader_0", xbox::EMUPATCH(D3DDevice_SetPixelShader_0), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetRenderState_Simple", xbox::EMUPATCH(D3DDevice_SetRenderState_Simple), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetRenderTarget", xbox::EMUPATCH(D3DDevice_SetRenderTarget), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetRenderTargetFast", xbox::EMUPATCH(D3DDevice_SetRenderTargetFast), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetRenderTarget_0", xbox::EMUPATCH(D3DDevice_SetRenderTarget_0), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetScreenSpaceOffset", xbox::EMUPATCH(D3DDevice_SetScreenSpaceOffset), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetShaderConstantMode", xbox::EMUPATCH(D3DDevice_SetShaderConstantMode), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetShaderConstantMode_0__LTCG_eax1", xbox::EMUPATCH(D3DDevice_SetShaderConstantMode_0__LTCG_eax1), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetSoftDisplayFilter", xbox::EMUPATCH(D3DDevice_SetSoftDisplayFilter), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetStipple", xbox::EMUPATCH(D3DDevice_SetStipple), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetStreamSource", xbox::EMUPATCH(D3DDevice_SetStreamSource), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetStreamSource_0__LTCG_eax_StreamNumber_edi_pStreamData_ebx_Stride", xbox::EMUPATCH(D3DDevice_SetStreamSource_0__LTCG_eax_StreamNumber_edi_pStreamData_ebx_Stride), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetStreamSource_4", xbox::EMUPATCH(D3DDevice_SetStreamSource_4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetStreamSource_8", xbox::EMUPATCH(D3DDevice_SetStreamSource_8), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetStreamSource_8__LTCG_edx_StreamNumber", xbox::EMUPATCH(D3DDevice_SetStreamSource_8__LTCG_edx_StreamNumber), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetSwapCallback", xbox::EMUPATCH(D3DDevice_SetSwapCallback), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetTexture", xbox::EMUPATCH(D3DDevice_SetTexture), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetTexture_4__LTCG_eax_pTexture", xbox::EMUPATCH(D3DDevice_SetTexture_4__LTCG_eax_pTexture), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetTexture_4", 
xbox::EMUPATCH(D3DDevice_SetTexture_4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetTransform", xbox::EMUPATCH(D3DDevice_SetTransform), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetTransform_0__LTCG_eax1_edx2", xbox::EMUPATCH(D3DDevice_SetTransform_0__LTCG_eax1_edx2), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexData2f", xbox::EMUPATCH(D3DDevice_SetVertexData2f), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexData2s", xbox::EMUPATCH(D3DDevice_SetVertexData2s), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexData4f", xbox::EMUPATCH(D3DDevice_SetVertexData4f), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexData4f_16", xbox::EMUPATCH(D3DDevice_SetVertexData4f_16), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexData4s", xbox::EMUPATCH(D3DDevice_SetVertexData4s), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexData4ub", xbox::EMUPATCH(D3DDevice_SetVertexData4ub), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexDataColor", xbox::EMUPATCH(D3DDevice_SetVertexDataColor), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShader", xbox::EMUPATCH(D3DDevice_SetVertexShader), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShader_0", xbox::EMUPATCH(D3DDevice_SetVertexShader_0), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShaderConstant", xbox::EMUPATCH(D3DDevice_SetVertexShaderConstant), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShaderConstant1", xbox::EMUPATCH(D3DDevice_SetVertexShaderConstant1), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShaderConstant1Fast", xbox::EMUPATCH(D3DDevice_SetVertexShaderConstant1Fast), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShaderConstant4", xbox::EMUPATCH(D3DDevice_SetVertexShaderConstant4), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShaderConstantNotInline", xbox::EMUPATCH(D3DDevice_SetVertexShaderConstantNotInline), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShaderConstantNotInlineFast", xbox::EMUPATCH(D3DDevice_SetVertexShaderConstantNotInlineFast), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShaderConstant_8", xbox::EMUPATCH(D3DDevice_SetVertexShaderConstant_8), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVertexShaderInput", xbox::EMUPATCH(D3DDevice_SetVertexShaderInput), PATCH_HLE_D3D), //PATCH_ENTRY("D3DDevice_SetVertexShaderInputDirect", xbox::EMUPATCH(D3DDevice_SetVertexShaderInputDirect), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetVerticalBlankCallback", xbox::EMUPATCH(D3DDevice_SetVerticalBlankCallback), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SetViewport", xbox::EMUPATCH(D3DDevice_SetViewport), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_Swap", xbox::EMUPATCH(D3DDevice_Swap), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_Swap_0", xbox::EMUPATCH(D3DDevice_Swap_0), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_SwitchTexture", xbox::EMUPATCH(D3DDevice_SwitchTexture), PATCH_HLE_D3D), PATCH_ENTRY("D3DDevice_UpdateOverlay", xbox::EMUPATCH(D3DDevice_UpdateOverlay), PATCH_HLE_D3D), PATCH_ENTRY("D3DResource_BlockUntilNotBusy", xbox::EMUPATCH(D3DResource_BlockUntilNotBusy), PATCH_HLE_D3D), PATCH_ENTRY("D3D_BlockOnTime", xbox::EMUPATCH(D3D_BlockOnTime), PATCH_HLE_D3D), PATCH_ENTRY("D3D_BlockOnTime_4", xbox::EMUPATCH(D3D_BlockOnTime_4), PATCH_HLE_D3D), PATCH_ENTRY("D3D_CommonSetRenderTarget", xbox::EMUPATCH(D3D_CommonSetRenderTarget), PATCH_HLE_D3D), PATCH_ENTRY("D3D_DestroyResource", xbox::EMUPATCH(D3D_DestroyResource), PATCH_HLE_D3D), PATCH_ENTRY("D3D_DestroyResource__LTCG", xbox::EMUPATCH(D3D_DestroyResource__LTCG), PATCH_HLE_D3D), PATCH_ENTRY("D3D_LazySetPointParams", xbox::EMUPATCH(D3D_LazySetPointParams), PATCH_HLE_D3D),<|fim▁hole|> 
PATCH_ENTRY("Direct3D_CreateDevice", xbox::EMUPATCH(Direct3D_CreateDevice), PATCH_HLE_D3D), PATCH_ENTRY("Direct3D_CreateDevice_16__LTCG_eax_BehaviorFlags_ebx_ppReturnedDeviceInterface", xbox::EMUPATCH(Direct3D_CreateDevice_16__LTCG_eax_BehaviorFlags_ebx_ppReturnedDeviceInterface), PATCH_HLE_D3D), PATCH_ENTRY("Direct3D_CreateDevice_16__LTCG_eax_BehaviorFlags_ecx_ppReturnedDeviceInterface", xbox::EMUPATCH(Direct3D_CreateDevice_16__LTCG_eax_BehaviorFlags_ecx_ppReturnedDeviceInterface), PATCH_HLE_D3D), PATCH_ENTRY("Direct3D_CreateDevice_4", xbox::EMUPATCH(Direct3D_CreateDevice_4), PATCH_HLE_D3D), PATCH_ENTRY("Lock2DSurface", xbox::EMUPATCH(Lock2DSurface), PATCH_HLE_D3D), PATCH_ENTRY("Lock3DSurface", xbox::EMUPATCH(Lock3DSurface), PATCH_HLE_D3D), // DSOUND PATCH_ENTRY("CDirectSound3DCalculator_Calculate3D", xbox::EMUPATCH(CDirectSound3DCalculator_Calculate3D), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSound3DCalculator_GetVoiceData", xbox::EMUPATCH(CDirectSound3DCalculator_GetVoiceData), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_AddRef", xbox::EMUPATCH(CDirectSoundStream_AddRef), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_Discontinuity", xbox::EMUPATCH(CDirectSoundStream_Discontinuity), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_Flush", xbox::EMUPATCH(CDirectSoundStream_Flush), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_FlushEx", xbox::EMUPATCH(CDirectSoundStream_FlushEx), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_GetInfo", xbox::EMUPATCH(CDirectSoundStream_GetInfo), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_GetStatus__r1", xbox::EMUPATCH(CDirectSoundStream_GetStatus__r1), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_GetStatus__r2", xbox::EMUPATCH(CDirectSoundStream_GetStatus__r2), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_GetVoiceProperties", xbox::EMUPATCH(CDirectSoundStream_GetVoiceProperties), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_Pause", xbox::EMUPATCH(CDirectSoundStream_Pause), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_PauseEx", xbox::EMUPATCH(CDirectSoundStream_PauseEx), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_Process", xbox::EMUPATCH(CDirectSoundStream_Process), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_Release", xbox::EMUPATCH(CDirectSoundStream_Release), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetAllParameters", xbox::EMUPATCH(CDirectSoundStream_SetAllParameters), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetConeAngles", xbox::EMUPATCH(CDirectSoundStream_SetConeAngles), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetConeOrientation", xbox::EMUPATCH(CDirectSoundStream_SetConeOrientation), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetConeOutsideVolume", xbox::EMUPATCH(CDirectSoundStream_SetConeOutsideVolume), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetDistanceFactor", xbox::EMUPATCH(CDirectSoundStream_SetDistanceFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetDopplerFactor", xbox::EMUPATCH(CDirectSoundStream_SetDopplerFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetEG", xbox::EMUPATCH(CDirectSoundStream_SetEG), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetFilter", xbox::EMUPATCH(CDirectSoundStream_SetFilter), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetFormat", xbox::EMUPATCH(CDirectSoundStream_SetFormat), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetFrequency", xbox::EMUPATCH(CDirectSoundStream_SetFrequency), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetHeadroom", 
xbox::EMUPATCH(CDirectSoundStream_SetHeadroom), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetI3DL2Source", xbox::EMUPATCH(CDirectSoundStream_SetI3DL2Source), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetLFO", xbox::EMUPATCH(CDirectSoundStream_SetLFO), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetMaxDistance", xbox::EMUPATCH(CDirectSoundStream_SetMaxDistance), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetMinDistance", xbox::EMUPATCH(CDirectSoundStream_SetMinDistance), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetMixBinVolumes_12", xbox::EMUPATCH(CDirectSoundStream_SetMixBinVolumes_12), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetMixBinVolumes_8", xbox::EMUPATCH(CDirectSoundStream_SetMixBinVolumes_8), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetMixBins", xbox::EMUPATCH(CDirectSoundStream_SetMixBins), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetMode", xbox::EMUPATCH(CDirectSoundStream_SetMode), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetOutputBuffer", xbox::EMUPATCH(CDirectSoundStream_SetOutputBuffer), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetPitch", xbox::EMUPATCH(CDirectSoundStream_SetPitch), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetPosition", xbox::EMUPATCH(CDirectSoundStream_SetPosition), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetRolloffCurve", xbox::EMUPATCH(CDirectSoundStream_SetRolloffCurve), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetRolloffFactor", xbox::EMUPATCH(CDirectSoundStream_SetRolloffFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetVelocity", xbox::EMUPATCH(CDirectSoundStream_SetVelocity), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSoundStream_SetVolume", xbox::EMUPATCH(CDirectSoundStream_SetVolume), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSound_CommitDeferredSettings", xbox::EMUPATCH(CDirectSound_CommitDeferredSettings), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSound_GetSpeakerConfig", xbox::EMUPATCH(CDirectSound_GetSpeakerConfig), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSound_SynchPlayback", xbox::EMUPATCH(CDirectSound_SynchPlayback), PATCH_HLE_DSOUND), PATCH_ENTRY("CMcpxStream_Dummy_0x10", xbox::EMUPATCH(CMcpxStream_Dummy_0x10), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundCreate", xbox::EMUPATCH(DirectSoundCreate), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundCreateBuffer", xbox::EMUPATCH(DirectSoundCreateBuffer), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundCreateStream", xbox::EMUPATCH(DirectSoundCreateStream), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundDoWork", xbox::EMUPATCH(DirectSoundDoWork), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundGetSampleTime", xbox::EMUPATCH(DirectSoundGetSampleTime), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundUseFullHRTF", xbox::EMUPATCH(DirectSoundUseFullHRTF), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundUseFullHRTF4Channel", xbox::EMUPATCH(DirectSoundUseFullHRTF4Channel), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundUseLightHRTF", xbox::EMUPATCH(DirectSoundUseLightHRTF), PATCH_HLE_DSOUND), PATCH_ENTRY("DirectSoundUseLightHRTF4Channel", xbox::EMUPATCH(DirectSoundUseLightHRTF4Channel), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_AddRef", xbox::EMUPATCH(IDirectSoundBuffer_AddRef), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_GetCurrentPosition", xbox::EMUPATCH(IDirectSoundBuffer_GetCurrentPosition), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_GetStatus", xbox::EMUPATCH(IDirectSoundBuffer_GetStatus), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_GetVoiceProperties", 
xbox::EMUPATCH(IDirectSoundBuffer_GetVoiceProperties), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_Lock", xbox::EMUPATCH(IDirectSoundBuffer_Lock), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_Pause", xbox::EMUPATCH(IDirectSoundBuffer_Pause), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_PauseEx", xbox::EMUPATCH(IDirectSoundBuffer_PauseEx), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_Play", xbox::EMUPATCH(IDirectSoundBuffer_Play), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_PlayEx", xbox::EMUPATCH(IDirectSoundBuffer_PlayEx), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_Release", xbox::EMUPATCH(IDirectSoundBuffer_Release), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_Set3DVoiceData", xbox::EMUPATCH(IDirectSoundBuffer_Set3DVoiceData), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetAllParameters", xbox::EMUPATCH(IDirectSoundBuffer_SetAllParameters), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetBufferData", xbox::EMUPATCH(IDirectSoundBuffer_SetBufferData), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetConeAngles", xbox::EMUPATCH(IDirectSoundBuffer_SetConeAngles), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetConeOrientation", xbox::EMUPATCH(IDirectSoundBuffer_SetConeOrientation), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetConeOutsideVolume", xbox::EMUPATCH(IDirectSoundBuffer_SetConeOutsideVolume), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetCurrentPosition", xbox::EMUPATCH(IDirectSoundBuffer_SetCurrentPosition), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetDistanceFactor", xbox::EMUPATCH(IDirectSoundBuffer_SetDistanceFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetDopplerFactor", xbox::EMUPATCH(IDirectSoundBuffer_SetDopplerFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetEG", xbox::EMUPATCH(IDirectSoundBuffer_SetEG), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetFilter", xbox::EMUPATCH(IDirectSoundBuffer_SetFilter), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetFormat", xbox::EMUPATCH(IDirectSoundBuffer_SetFormat), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetFrequency", xbox::EMUPATCH(IDirectSoundBuffer_SetFrequency), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetHeadroom", xbox::EMUPATCH(IDirectSoundBuffer_SetHeadroom), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetI3DL2Source", xbox::EMUPATCH(IDirectSoundBuffer_SetI3DL2Source), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetLFO", xbox::EMUPATCH(IDirectSoundBuffer_SetLFO), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetLoopRegion", xbox::EMUPATCH(IDirectSoundBuffer_SetLoopRegion), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetMaxDistance", xbox::EMUPATCH(IDirectSoundBuffer_SetMaxDistance), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetMinDistance", xbox::EMUPATCH(IDirectSoundBuffer_SetMinDistance), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetMixBinVolumes_12", xbox::EMUPATCH(IDirectSoundBuffer_SetMixBinVolumes_12), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetMixBinVolumes_8", xbox::EMUPATCH(IDirectSoundBuffer_SetMixBinVolumes_8), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetMixBins", xbox::EMUPATCH(IDirectSoundBuffer_SetMixBins), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetMode", xbox::EMUPATCH(IDirectSoundBuffer_SetMode), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetNotificationPositions", xbox::EMUPATCH(IDirectSoundBuffer_SetNotificationPositions), PATCH_HLE_DSOUND), 
PATCH_ENTRY("IDirectSoundBuffer_SetOutputBuffer", xbox::EMUPATCH(IDirectSoundBuffer_SetOutputBuffer), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetPitch", xbox::EMUPATCH(IDirectSoundBuffer_SetPitch), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetPlayRegion", xbox::EMUPATCH(IDirectSoundBuffer_SetPlayRegion), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetPosition", xbox::EMUPATCH(IDirectSoundBuffer_SetPosition), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetRolloffCurve", xbox::EMUPATCH(IDirectSoundBuffer_SetRolloffCurve), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetRolloffFactor", xbox::EMUPATCH(IDirectSoundBuffer_SetRolloffFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetVelocity", xbox::EMUPATCH(IDirectSoundBuffer_SetVelocity), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_SetVolume", xbox::EMUPATCH(IDirectSoundBuffer_SetVolume), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_Stop", xbox::EMUPATCH(IDirectSoundBuffer_Stop), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_StopEx", xbox::EMUPATCH(IDirectSoundBuffer_StopEx), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_Unlock", xbox::EMUPATCH(IDirectSoundBuffer_Unlock), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundBuffer_Use3DVoiceData", xbox::EMUPATCH(IDirectSoundBuffer_Use3DVoiceData), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_Set3DVoiceData", xbox::EMUPATCH(IDirectSoundStream_Set3DVoiceData), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_SetEG", xbox::EMUPATCH(IDirectSoundStream_SetEG), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_SetFilter", xbox::EMUPATCH(IDirectSoundStream_SetFilter), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_SetFrequency", xbox::EMUPATCH(IDirectSoundStream_SetFrequency), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_SetHeadroom", xbox::EMUPATCH(IDirectSoundStream_SetHeadroom), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_SetLFO", xbox::EMUPATCH(IDirectSoundStream_SetLFO), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_SetMixBins", xbox::EMUPATCH(IDirectSoundStream_SetMixBins), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_SetPitch", xbox::EMUPATCH(IDirectSoundStream_SetPitch), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_SetVolume", xbox::EMUPATCH(IDirectSoundStream_SetVolume), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSoundStream_Use3DVoiceData", xbox::EMUPATCH(IDirectSoundStream_Use3DVoiceData), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_AddRef", xbox::EMUPATCH(IDirectSound_AddRef), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_CommitDeferredSettings", xbox::EMUPATCH(IDirectSound_CommitDeferredSettings), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_CommitEffectData", xbox::EMUPATCH(IDirectSound_CommitEffectData), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_CreateSoundBuffer", xbox::EMUPATCH(IDirectSound_CreateSoundBuffer), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_CreateSoundStream", xbox::EMUPATCH(IDirectSound_CreateSoundStream), PATCH_HLE_DSOUND), // PATCH_ENTRY("IDirectSound_DownloadEffectsImage", xbox::EMUPATCH(IDirectSound_DownloadEffectsImage), PATCH_HLE_DSOUND), PATCH_ENTRY("CDirectSound_DownloadEffectsImage", xbox::EMUPATCH(CDirectSound_DownloadEffectsImage), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_EnableHeadphones", xbox::EMUPATCH(IDirectSound_EnableHeadphones), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_GetCaps", xbox::EMUPATCH(IDirectSound_GetCaps), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_GetEffectData", xbox::EMUPATCH(IDirectSound_GetEffectData), PATCH_HLE_DSOUND), 
PATCH_ENTRY("IDirectSound_GetOutputLevels", xbox::EMUPATCH(IDirectSound_GetOutputLevels), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_GetSpeakerConfig", xbox::EMUPATCH(IDirectSound_GetSpeakerConfig), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_Release", xbox::EMUPATCH(IDirectSound_Release), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetAllParameters", xbox::EMUPATCH(IDirectSound_SetAllParameters), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetDistanceFactor", xbox::EMUPATCH(IDirectSound_SetDistanceFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetDopplerFactor", xbox::EMUPATCH(IDirectSound_SetDopplerFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetEffectData", xbox::EMUPATCH(IDirectSound_SetEffectData), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetI3DL2Listener", xbox::EMUPATCH(IDirectSound_SetI3DL2Listener), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetMixBinHeadroom", xbox::EMUPATCH(IDirectSound_SetMixBinHeadroom), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetOrientation", xbox::EMUPATCH(IDirectSound_SetOrientation), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetPosition", xbox::EMUPATCH(IDirectSound_SetPosition), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetRolloffFactor", xbox::EMUPATCH(IDirectSound_SetRolloffFactor), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SetVelocity", xbox::EMUPATCH(IDirectSound_SetVelocity), PATCH_HLE_DSOUND), PATCH_ENTRY("IDirectSound_SynchPlayback", xbox::EMUPATCH(IDirectSound_SynchPlayback), PATCH_HLE_DSOUND), //PATCH_ENTRY("XAudioCreateAdpcmFormat", xbox::EMUPATCH(XAudioCreateAdpcmFormat), PATCH_HLE_DSOUND), // NOTE: Not require to patch PATCH_ENTRY("XAudioDownloadEffectsImage", xbox::EMUPATCH(XAudioDownloadEffectsImage), PATCH_HLE_DSOUND), PATCH_ENTRY("XAudioSetEffectData", xbox::EMUPATCH(XAudioSetEffectData), PATCH_HLE_DSOUND), // OHCI PATCH_ENTRY("XGetDeviceChanges", xbox::EMUPATCH(XGetDeviceChanges), PATCH_HLE_OHCI), PATCH_ENTRY("XGetDeviceEnumerationStatus", xbox::EMUPATCH(XGetDeviceEnumerationStatus), PATCH_HLE_OHCI), PATCH_ENTRY("XGetDevices", xbox::EMUPATCH(XGetDevices), PATCH_HLE_OHCI), PATCH_ENTRY("XInitDevices", xbox::EMUPATCH(XInitDevices), PATCH_HLE_OHCI), PATCH_ENTRY("XInputClose", xbox::EMUPATCH(XInputClose), PATCH_HLE_OHCI), PATCH_ENTRY("XInputGetCapabilities", xbox::EMUPATCH(XInputGetCapabilities), PATCH_HLE_OHCI), PATCH_ENTRY("XInputGetDeviceDescription", xbox::EMUPATCH(XInputGetDeviceDescription), PATCH_HLE_OHCI), PATCH_ENTRY("XInputGetState", xbox::EMUPATCH(XInputGetState), PATCH_HLE_OHCI), PATCH_ENTRY("XInputOpen", xbox::EMUPATCH(XInputOpen), PATCH_HLE_OHCI), PATCH_ENTRY("XInputPoll", xbox::EMUPATCH(XInputPoll), PATCH_HLE_OHCI), PATCH_ENTRY("XInputSetLightgunCalibration", xbox::EMUPATCH(XInputSetLightgunCalibration), PATCH_HLE_OHCI), PATCH_ENTRY("XInputSetState", xbox::EMUPATCH(XInputSetState), PATCH_HLE_OHCI), // XAPI PATCH_ENTRY("ConvertThreadToFiber", xbox::EMUPATCH(ConvertThreadToFiber), PATCH_IS_FIBER), PATCH_ENTRY("CreateFiber", xbox::EMUPATCH(CreateFiber), PATCH_IS_FIBER), PATCH_ENTRY("DeleteFiber", xbox::EMUPATCH(DeleteFiber), PATCH_IS_FIBER), //PATCH_ENTRY("GetExitCodeThread", xbox::EMUPATCH(GetExitCodeThread), PATCH_ALWAYS), //PATCH_ENTRY("GetThreadPriority", xbox::EMUPATCH(GetThreadPriority), PATCH_ALWAYS), PATCH_ENTRY("OutputDebugStringA", xbox::EMUPATCH(OutputDebugStringA), PATCH_ALWAYS), //PATCH_ENTRY("RaiseException", xbox::EMUPATCH(RaiseException), PATCH_ALWAYS), //PATCH_ENTRY("SetThreadPriority", xbox::EMUPATCH(SetThreadPriority), PATCH_ALWAYS), //PATCH_ENTRY("SetThreadPriorityBoost", 
xbox::EMUPATCH(SetThreadPriorityBoost), PATCH_ALWAYS), PATCH_ENTRY("SignalObjectAndWait", xbox::EMUPATCH(SignalObjectAndWait), PATCH_ALWAYS), PATCH_ENTRY("SwitchToFiber", xbox::EMUPATCH(SwitchToFiber), PATCH_IS_FIBER), PATCH_ENTRY("XMountMUA", xbox::EMUPATCH(XMountMUA), PATCH_ALWAYS), PATCH_ENTRY("XMountMURootA", xbox::EMUPATCH(XMountMURootA), PATCH_ALWAYS), //PATCH_ENTRY("XSetProcessQuantumLength", xbox::EMUPATCH(XSetProcessQuantumLength), PATCH_ALWAYS), //PATCH_ENTRY("timeKillEvent", xbox::EMUPATCH(timeKillEvent), PATCH_ALWAYS), //PATCH_ENTRY("timeSetEvent", xbox::EMUPATCH(timeSetEvent), PATCH_ALWAYS), PATCH_ENTRY("XReadMUMetaData", xbox::EMUPATCH(XReadMUMetaData), PATCH_ALWAYS), PATCH_ENTRY("XUnmountMU", xbox::EMUPATCH(XUnmountMU), PATCH_ALWAYS), }; std::unordered_map<std::string, subhook::Hook> g_FunctionHooks; inline bool TitleRequiresUnpatchedFibers() { static bool detected = false; static bool result = false; // Prevent running the check every time this function is called if (detected) { return result; } // Array of known games that require the fiber unpatch hack DWORD titleIds[] = { 0x46490002, // Futurama PAL 0x56550008, // Futurama NTSC 0 }; DWORD* pTitleId = &titleIds[0]; while (*pTitleId != 0) { if (g_pCertificate->dwTitleId == *pTitleId) { result = true; break; } pTitleId++; } detected = true; return result; } // NOTE: EmuInstallPatch do not get to be in XbSymbolDatabase, do the patches in Cxbx project only. inline void EmuInstallPatch(const std::string FunctionName, const xbox::addr_xt FunctionAddr) { auto it = g_PatchTable.find(FunctionName); if (it == g_PatchTable.end()) { return; } auto patch = it->second; if ((patch.flags & PATCH_HLE_D3D) && bLLE_GPU) { printf("HLE: %s: Skipped (LLE GPU Enabled)\n", FunctionName.c_str()); return; } if ((patch.flags & PATCH_HLE_DSOUND) && bLLE_APU) { printf("HLE: %s: Skipped (LLE APU Enabled)\n", FunctionName.c_str()); return; } if ((patch.flags & PATCH_HLE_OHCI) && bLLE_USB) { printf("HLE: %s: Skipped (LLE OHCI Enabled)\n", FunctionName.c_str()); return; } // HACK: Some titles require unpatched Fibers, otherwise they enter an infinite loop // while others require patched Fibers, otherwise they outright crash // This is caused by limitations of Direct Code Execution and Cxbx-R's threading model if ((patch.flags & PATCH_IS_FIBER) && TitleRequiresUnpatchedFibers()) { printf("HLE: %s: Skipped (Game requires unpatched Fibers)\n", FunctionName.c_str()); return; } g_FunctionHooks[FunctionName].Install((void*)(FunctionAddr), (void*)patch.patchFunc); printf("HLE: %s Patched\n", FunctionName.c_str()); } void EmuInstallPatches() { for (const auto& it : g_SymbolAddresses) { EmuInstallPatch(it.first, it.second); } LookupTrampolinesD3D(); LookupTrampolinesXAPI(); } void* GetPatchedFunctionTrampoline(const std::string functionName) { auto it = g_FunctionHooks.find(functionName); if (it != g_FunctionHooks.end()) { auto trampoline = it->second.GetTrampoline(); if (trampoline == nullptr) { EmuLogEx(CXBXR_MODULE::HLE, LOG_LEVEL::WARNING, "Failed to get XB_Trampoline for %s", functionName.c_str()); } return trampoline; } return nullptr; }<|fim▁end|>
PATCH_ENTRY("D3D_SetCommonDebugRegisters", xbox::EMUPATCH(D3D_SetCommonDebugRegisters), PATCH_HLE_D3D),
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># flake8: noqa from .account import AccountViewSet from .allocation import AllocationViewSet from .allocation_source import AllocationSourceViewSet from .boot_script import BootScriptViewSet from .base import BaseRequestViewSet from .credential import CredentialViewSet from .email_template import EmailTemplateViewSet from .event import EventViewSet from .group import GroupViewSet from .help_link import HelpLinkViewSet from .identity import IdentityViewSet from .identity_membership import IdentityMembershipViewSet from .image import ImageViewSet from .image_bookmark import ImageBookmarkViewSet from .image_tag import ImageTagViewSet from .image_version import ImageVersionViewSet from .image_version_boot_script import ImageVersionBootScriptViewSet from .image_version_membership import ImageVersionMembershipViewSet from .image_version_license import ImageVersionLicenseViewSet from .instance import InstanceViewSet from .instance_tag import InstanceTagViewSet from .instance_history import InstanceStatusHistoryViewSet from .instance_action import InstanceActionViewSet from .license import LicenseViewSet<|fim▁hole|>from .project import ProjectViewSet from .project_application import ProjectApplicationViewSet from .project_link import ProjectExternalLinkViewSet from .project_instance import ProjectInstanceViewSet from .project_volume import ProjectVolumeViewSet from .provider import ProviderViewSet from .provider_machine import ProviderMachineViewSet from .provider_type import ProviderTypeViewSet from .quota import QuotaViewSet from .resource_request import ResourceRequestViewSet from .reporting import ReportingViewSet from .size import SizeViewSet from .status_type import StatusTypeViewSet from .email import InstanceSupportEmailViewSet, VolumeSupportEmailViewSet, FeedbackEmailViewSet, ResourceEmailViewSet from .emulate import TokenEmulateViewSet, SessionEmulateViewSet from .tag import TagViewSet from .token import TokenViewSet from .token_update import TokenUpdateViewSet from .user import UserViewSet from .volume import VolumeViewSet from .metric import MetricViewSet from .ssh_key import SSHKeyViewSet<|fim▁end|>
from .link import ExternalLinkViewSet from .machine_request import MachineRequestViewSet from .maintenance_record import MaintenanceRecordViewSet from .platform_type import PlatformTypeViewSet
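The four imports above are exactly the text elided between the `license` and `project` imports in the prompt; the alphabetical ordering of the surrounding lines is what makes this hole predictable. Reconstructing the original file from a row is then a single concatenation — a sketch reusing the hypothetical `parse_prompt` from the first example:

```python
def reconstruct(prompt: str, completion: str) -> str:
    """Rebuild the original source file from one row."""
    _, prefix, suffix = parse_prompt(prompt)  # defined in the first sketch
    return prefix + completion + suffix
```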
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import Router = require('koa-router');<|fim▁hole|> const router = new Router({ prefix: '/toolkit' }); router.get('/example', routes.example); router.post('/upload', validator(schema.upload), routes.upload); export default router;<|fim▁end|>
import * as schema from './schema'; import * as routes from './routes'; import validator from '../../utils/validator';
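Here the hole covers the import block between the koa-router require and the router construction, so the completion restores three consecutive import lines. Producing such a row from a file is the inverse slicing operation; a sketch where the hole boundaries are chosen by the caller and the dict keys mirror the dump's two columns:

```python
def make_row(file_name: str, source: str, start: int, end: int) -> dict:
    """Elide source[start:end] and emit one prompt/completion pair."""
    prompt = (FILE_OPEN + file_name + FILE_CLOSE + FIM_BEGIN
              + source[:start] + FIM_HOLE + source[end:] + FIM_END)
    return {"prompt": prompt, "completion": source[start:end]}
```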
<|file_name|>VcsConsoleLine.java<|end_file_name|><|fim▁begin|>// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.vcs; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.util.containers.ContainerUtil; import consulo.util.lang.Pair; import consulo.util.lang.StringUtil; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.Collections; import java.util.List; public final class VcsConsoleLine { private final List<Pair<String, ConsoleViewContentType>> myChunks; private VcsConsoleLine(@Nonnull List<Pair<String, ConsoleViewContentType>> chunks) { myChunks = chunks; } public void print(@Nonnull ConsoleView console) { ConsoleViewContentType lastType = ConsoleViewContentType.NORMAL_OUTPUT; for (Pair<String, ConsoleViewContentType> chunk : myChunks) { console.print(chunk.first, chunk.second); lastType = chunk.second; } console.print("\n", lastType); }<|fim▁hole|> public static VcsConsoleLine create(@Nullable String message, @Nonnull ConsoleViewContentType contentType) { return create(Collections.singletonList(Pair.create(message, contentType))); } @Nullable public static VcsConsoleLine create(@Nonnull List<Pair<String, ConsoleViewContentType>> lineChunks) { List<Pair<String, ConsoleViewContentType>> chunks = ContainerUtil.filter(lineChunks, it -> !StringUtil.isEmptyOrSpaces(it.first)); if (chunks.isEmpty()) return null; return new VcsConsoleLine(chunks); } }<|fim▁end|>
@Nullable
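Completions can be as small as this single `@Nullable` annotation, or empty altogether, so a consumer should validate rows structurally rather than assume a minimum size. A sketch of the checks worth running, under the assumption that a well-formed prompt carries each marker exactly once and in order:

```python
def is_well_formed(prompt: str, completion: str) -> bool:
    """Each marker present once, in order, none leaking into the completion."""
    markers = [FILE_OPEN, FILE_CLOSE, FIM_BEGIN, FIM_HOLE, FIM_END]
    positions = [prompt.find(m) for m in markers]
    in_order = all(a < b for a, b in zip(positions, positions[1:]))
    once = all(prompt.count(m) == 1 for m in markers)
    clean = not any(m in completion for m in markers)
    return min(positions) >= 0 and in_order and once and clean
```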
<|file_name|>destringify.js<|end_file_name|><|fim▁begin|>var __window = window; var destringify = function(date) { if (date != null && 'string' === typeof(date)) return new Date(date); return date;<|fim▁hole|> if (!date) return null; return dateFormat(date, "UTC:yyyy-mm-dd'T'HH:MM:ss'Z'"); } var stringreplaceall = function(target, search, replacement) { return target.split(search).join(replacement); }; var grabiframecontentdocument = function(id) { return document.getElementById(id).contentWindow.document; }; var grabcontentdocument = function(selector, parent) { return jQuery(selector, parent).get(0).contentWindow.document; };<|fim▁end|>
} var stringify = function(date) {
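This hole cuts destringify.js mid-function: the completion has to close the open brace of `destringify` before opening `stringify`, which is why it reads `} var stringify = function(date) {`. A crude brace-balance check makes that visible (a heuristic only — it ignores braces inside strings and comments):

```python
def brace_delta(snippet: str) -> int:
    """Net change in '{' nesting contributed by a snippet."""
    return snippet.count("{") - snippet.count("}")

# The completion above closes one scope and opens another, netting zero:
assert brace_delta("} var stringify = function(date) {") == 0
```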
<|file_name|>removeOldDellReports.py<|end_file_name|><|fim▁begin|>################################################################################ # # This program is part of the DellMon Zenpack for Zenoss. # Copyright (C) 2008, 2009, 2010 Egor Puzanov. # # This program can be used under the GNU General Public License version 2 # You can find full information here: http://www.zenoss.com/oss<|fim▁hole|> Delete the previous DRAC and Storage Controllers reports. $Id:$ ''' from Products.ZenModel.ZenPack import ZenPackMigration from Products.ZenModel.migrate.Migrate import Version class removeOldDellReports(ZenPackMigration): version = Version(2, 3) def migrate(self, pack): if hasattr(pack.dmd.Reports, 'Device Reports'): devReports = pack.dmd.Reports['Device Reports'] if hasattr(devReports, 'Dell DRAC Controllers'): devReports._delObject('Dell DRAC Controllers') if hasattr(devReports, 'Dell Storage Controllers'): devReports._delObject('Dell Storage Controllers') removeOldDellReports()<|fim▁end|>
# ################################################################################ __doc__='''
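Not every completion is executable code: this one restores a comment banner and the opening quotes of a module docstring. When filtering a dump like this it can help to tag such rows; a rough classifier, on the assumption that leading comment syntax is a good-enough signal:

```python
def is_comment_only(completion: str) -> bool:
    """Rough check that a completion starts as comment or docstring text."""
    stripped = completion.lstrip()
    return stripped.startswith(("#", "//", "/*", '"""', "'''"))
```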
<|file_name|>pickerController.js<|end_file_name|><|fim▁begin|>app.controller('PickerController', function ($scope, $modalInstance, itemColor) { $scope.showCarrierColors = true; $scope.brandColors = [ { name: 'Brand Blue', hex: '#276681' }, { name: 'Brand Green', hex: '#66b245' }, { name: 'Brand Blue Desaturated', hex: '#417c95' }, { name: 'Brand Green Desaturated', hex: '#75b86f' }, { name: 'Bluest', hex: '#5baebf' }, { name: 'Blue', hex: '#66b7bb' }, { name: 'Blue Green', hex: '#76beb6' }, { name: 'Green Blue', hex: '#84c6ae' }, { name: 'Green', hex: '#96cca7' }, { name: 'Greenest', hex: '#a4d49a' }, { name: 'Level 2 Blend', hex: '#7fced8' }, { name: 'Level 2 Blend', hex: '#8fd4d6' }, { name: 'Level 2 Blend', hex: '#a5d7d3' }, { name: 'Level 2 Blend', hex: '#b5dcce' }, { name: 'Level 2 Blend', hex: '#bfe0ca' }, { name: 'Level 2 Blend', hex: '#c8e5c2' }, { name: 'Level 3 Blend', hex: '#b0e2e7' }, { name: 'Level 3 Blend', hex: '#bce5e6' }, { name: 'Level 3 Blend', hex: '#c8e6e4' }, { name: 'Level 3 Blend', hex: '#d3eae2' }, { name: 'Level 3 Blend', hex: '#d8ecdf' }, { name: 'Level 3 Blend', hex: '#ddefda' }, { name: 'Illustration Stroke Darkest', hex: '#54636a' }, { name: 'Illustration Stroke Medium', hex: '#7f8a8f' }, { name: 'Illustration Stroke Light', hex: '#a9b1b4' }, { name: 'Illustration Stroke Lightest', hex: '#d4d8da' }, { name: 'Yellow', hex: '#f5db77' }, { name: 'Medium Yellow', hex: '#f8e499' }, { name: 'Light Yellow', hex: '#faedbb' }, { <|fim▁hole|> name: 'Tang', hex: '#f38871' }, { name: 'Medium Tang', hex: '#f7a593' }, { name: 'Light Tang', hex: '#fbc1b4' }, { name: 'Lightest Tang', hex: '#ffded6' }, { name: 'Black', hex: '#555555' }, { name: 'Dark Gray', hex: '#797979' }, { name: 'Medium Gray', hex: '#9c9c9c' }, { name: 'Light Gray', hex: '#c0c0c0' }, { name: 'Lightest Gray', hex: '#e3e3e3' }, { name: 'Off White', hex: '#f9f9f9' } ]; $scope.carrierColors = [ { carrier: 'Verizon', hex: '#ca5b59' }, { carrier: 'AT&T', hex: '#5694b4' }, { carrier: 'T-Mobile', hex: '#d45da0' }, { carrier: 'Sprint', hex: '#e9b444' }, { carrier: 'Cricket', hex: '#008752' }, { carrier: 'Cricket', hex: '#439474' }, { carrier: 'MetroPCS', hex: '#6764b3' }, { carrier: 'EE', hex: '#2e9a9c' }, { carrier: 'O2', hex: '#2566a8' }, { carrier: 'Orange', hex: '#ff6c42' }, { carrier: 'Three', hex: '#333333' }, { carrier: 'Vodafone', hex: '#eb5247' }, { carrier: 'Bell', hex: '#2876a5' }, { carrier: 'Leap', hex: '#330066' }, { carrier: 'Rogers', hex: '#d63e3e' }, { carrier: 'Telus', hex: '#4e5cb5' }, { carrier: 'Videotron', hex: '#fcc622' }, { carrier: 'Wind', hex: '#ec7c23' }, { carrier: 'Tie', hex: '#999999' } ] $scope.ok = function () { $modalInstance.close(itemColor); }; $scope.closeModal = function(color) { $modalInstance.close(color); } });<|fim▁end|>
name: 'Lightest Yellow', hex: '#fdf6dd' }, {
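Here the elided span is one element of a JavaScript object literal inside an array, ending midway into the next element — hole boundaries do not respect syntactic units. Simple per-row size accounting helps spot such fragmentary holes; a sketch over an iterable of rows, with the row shape assumed as before:

```python
def row_sizes(rows):
    """Yield (file_name, prompt_chars, completion_chars) per row."""
    for row in rows:
        name, _, _ = parse_prompt(row["prompt"])  # from the first sketch
        yield name, len(row["prompt"]), len(row["completion"])
```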
<|file_name|>MenuOpenRounded.js<|end_file_name|><|fim▁begin|><|fim▁hole|>import * as React from 'react'; import createSvgIcon from './utils/createSvgIcon'; export default createSvgIcon( <path d="M4 18h11c.55 0 1-.45 1-1s-.45-1-1-1H4c-.55 0-1 .45-1 1s.45 1 1 1zm0-5h8c.55 0 1-.45 1-1s-.45-1-1-1H4c-.55 0-1 .45-1 1s.45 1 1 1zM3 7c0 .55.45 1 1 1h11c.55 0 1-.45 1-1s-.45-1-1-1H4c-.55 0-1 .45-1 1zm17.3 7.88L17.42 12l2.88-2.88c.39-.39.39-1.02 0-1.41a.9959.9959 0 00-1.41 0L15.3 11.3c-.39.39-.39 1.02 0 1.41l3.59 3.59c.39.39 1.02.39 1.41 0 .38-.39.39-1.03 0-1.42z" /> , 'MenuOpenRounded');<|fim▁end|>
<|file_name|>char.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Character manipulation (`char` type, Unicode Scalar Value) //! //! This module provides the `CharExt` trait, as well as its //! implementation for the primitive `char` type, in order to allow //! basic character manipulation. //! //! A `char` actually represents a //! *[Unicode Scalar //! Value](http://www.unicode.org/glossary/#unicode_scalar_value)*, as it can //! contain any Unicode code point except high-surrogate and low-surrogate code //! points. //! //! As such, only values in the ranges \[0x0,0xD7FF\] and \[0xE000,0x10FFFF\] //! (inclusive) are allowed. A `char` can always be safely cast to a `u32`; //! however the converse is not always true due to the above range limits //! and, as such, should be performed via the `from_u32` function. #![stable(feature = "rust1", since = "1.0.0")] #![doc(primitive = "char")] use core::char::CharExt as C; use core::option::Option::{self, Some}; use core::iter::Iterator; use tables::{derived_property, property, general_category, conversions, charwidth}; // stable reexports pub use core::char::{MAX, from_u32, from_digit, EscapeUnicode, EscapeDefault}; // unstable reexports #[allow(deprecated)] pub use normalize::{decompose_canonical, decompose_compatible, compose}; #[allow(deprecated)] pub use tables::normalization::canonical_combining_class; pub use tables::UNICODE_VERSION; /// An iterator over the lowercase mapping of a given character, returned from /// the [`to_lowercase` method](../primitive.char.html#method.to_lowercase) on /// characters. #[stable(feature = "rust1", since = "1.0.0")] pub struct ToLowercase(Option<char>); #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for ToLowercase { type Item = char; fn next(&mut self) -> Option<char> { self.0.take() } } /// An iterator over the uppercase mapping of a given character, returned from /// the [`to_uppercase` method](../primitive.char.html#method.to_uppercase) on /// characters. #[stable(feature = "rust1", since = "1.0.0")] pub struct ToUppercase(Option<char>); #[stable(feature = "rust1", since = "1.0.0")] impl Iterator for ToUppercase { type Item = char; fn next(&mut self) -> Option<char> { self.0.take() } } #[stable(feature = "rust1", since = "1.0.0")] #[lang = "char"] impl char { /// Checks if a `char` parses as a numeric digit in the given radix. /// /// Compared to `is_numeric()`, this function only recognizes the characters /// `0-9`, `a-z` and `A-Z`. /// /// # Return value /// /// Returns `true` if `c` is a valid digit under `radix`, and `false` /// otherwise. /// /// # Panics /// /// Panics if given a radix > 36. /// /// # Examples /// /// ``` /// let c = '1'; /// /// assert!(c.is_digit(10)); /// /// assert!('f'.is_digit(16)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn is_digit(self, radix: u32) -> bool { C::is_digit(self, radix) } /// Converts a character to the corresponding digit. /// /// # Return value /// /// If `c` is between '0' and '9', the corresponding value between 0 and /// 9. If `c` is 'a' or 'A', 10. 
If `c` is 'b' or 'B', 11, etc. Returns /// none if the character does not refer to a digit in the given radix. /// /// # Panics /// /// Panics if given a radix outside the range [0..36]. /// /// # Examples /// /// ``` /// let c = '1'; /// /// assert_eq!(c.to_digit(10), Some(1)); /// /// assert_eq!('f'.to_digit(16), Some(15)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn to_digit(self, radix: u32) -> Option<u32> { C::to_digit(self, radix) } /// Returns an iterator that yields the hexadecimal Unicode escape of a /// character, as `char`s. /// /// All characters are escaped with Rust syntax of the form `\\u{NNNN}` /// where `NNNN` is the shortest hexadecimal representation of the code /// point. /// /// # Examples /// /// ``` /// for i in '❤'.escape_unicode() { /// println!("{}", i); /// } /// ``` /// /// This prints: /// /// ```text /// \ /// u /// { /// 2 /// 7 /// 6 /// 4 /// } /// ``` /// /// Collecting into a `String`: /// /// ``` /// let heart: String = '❤'.escape_unicode().collect(); /// /// assert_eq!(heart, r"\u{2764}"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn escape_unicode(self) -> EscapeUnicode { C::escape_unicode(self) } /// Returns an iterator that yields the 'default' ASCII and /// C++11-like literal escape of a character, as `char`s. /// /// The default is chosen with a bias toward producing literals that are /// legal in a variety of languages, including C++11 and similar C-family /// languages. The exact rules are: /// /// * Tab, CR and LF are escaped as '\t', '\r' and '\n' respectively. /// * Single-quote, double-quote and backslash chars are backslash- /// escaped. /// * Any other chars in the range [0x20,0x7e] are not escaped. /// * Any other chars are given hex Unicode escapes; see `escape_unicode`. /// /// # Examples /// /// ``` /// for i in '"'.escape_default() { /// println!("{}", i); /// } /// ``` /// /// This prints: /// /// ```text /// \ /// " /// ``` /// /// Collecting into a `String`: /// /// ``` /// let quote: String = '"'.escape_default().collect(); /// /// assert_eq!(quote, "\\\""); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn escape_default(self) -> EscapeDefault { C::escape_default(self) } /// Returns the number of bytes this character would need if encoded in /// UTF-8. /// /// # Examples /// /// ``` /// let n = 'ß'.len_utf8(); /// /// assert_eq!(n, 2); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn len_utf8(self) -> usize { C::len_utf8(self) } /// Returns the number of 16-bit code units this character would need if /// encoded in UTF-16. /// /// # Examples /// /// ``` /// let n = 'ß'.len_utf16(); /// /// assert_eq!(n, 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn len_utf16(self) -> usize { C::len_utf16(self) } /// Encodes this character as UTF-8 into the provided byte buffer, and then /// returns the number of bytes written. /// /// If the buffer is not large enough, nothing will be written into it and a /// `None` will be returned. A buffer of length four is large enough to /// encode any `char`. /// /// # Examples /// /// In both of these examples, 'ß' takes two bytes to encode. 
///<|fim▁hole|> /// ``` /// # #![feature(unicode)] /// let mut b = [0; 2]; /// /// let result = 'ß'.encode_utf8(&mut b); /// /// assert_eq!(result, Some(2)); /// ``` /// /// A buffer that's too small: /// /// ``` /// # #![feature(unicode)] /// let mut b = [0; 1]; /// /// let result = 'ß'.encode_utf8(&mut b); /// /// assert_eq!(result, None); /// ``` #[unstable(feature = "unicode", reason = "pending decision about Iterator/Writer/Reader")] pub fn encode_utf8(self, dst: &mut [u8]) -> Option<usize> { C::encode_utf8(self, dst) } /// Encodes this character as UTF-16 into the provided `u16` buffer, and /// then returns the number of `u16`s written. /// /// If the buffer is not large enough, nothing will be written into it and a /// `None` will be returned. A buffer of length 2 is large enough to encode /// any `char`. /// /// # Examples /// /// In both of these examples, 'ß' takes one `u16` to encode. /// /// ``` /// # #![feature(unicode)] /// let mut b = [0; 1]; /// /// let result = 'ß'.encode_utf16(&mut b); /// /// assert_eq!(result, Some(1)); /// ``` /// /// A buffer that's too small: /// /// ``` /// # #![feature(unicode)] /// let mut b = [0; 0]; /// /// let result = 'ß'.encode_utf8(&mut b); /// /// assert_eq!(result, None); /// ``` #[unstable(feature = "unicode", reason = "pending decision about Iterator/Writer/Reader")] pub fn encode_utf16(self, dst: &mut [u16]) -> Option<usize> { C::encode_utf16(self, dst) } /// Returns whether the specified character is considered a Unicode /// alphabetic code point. #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn is_alphabetic(self) -> bool { match self { 'a' ... 'z' | 'A' ... 'Z' => true, c if c > '\x7f' => derived_property::Alphabetic(c), _ => false } } /// Returns whether the specified character satisfies the 'XID_Start' /// Unicode property. /// /// 'XID_Start' is a Unicode Derived Property specified in /// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications), /// mostly similar to ID_Start but modified for closure under NFKx. #[unstable(feature = "unicode", reason = "mainly needed for compiler internals")] #[inline] pub fn is_xid_start(self) -> bool { derived_property::XID_Start(self) } /// Returns whether the specified `char` satisfies the 'XID_Continue' /// Unicode property. /// /// 'XID_Continue' is a Unicode Derived Property specified in /// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications), /// mostly similar to 'ID_Continue' but modified for closure under NFKx. #[unstable(feature = "unicode", reason = "mainly needed for compiler internals")] #[inline] pub fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) } /// Indicates whether a character is in lowercase. /// /// This is defined according to the terms of the Unicode Derived Core /// Property `Lowercase`. #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn is_lowercase(self) -> bool { match self { 'a' ... 'z' => true, c if c > '\x7f' => derived_property::Lowercase(c), _ => false } } /// Indicates whether a character is in uppercase. /// /// This is defined according to the terms of the Unicode Derived Core /// Property `Uppercase`. #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn is_uppercase(self) -> bool { match self { 'A' ... 'Z' => true, c if c > '\x7f' => derived_property::Uppercase(c), _ => false } } /// Indicates whether a character is whitespace. /// /// Whitespace is defined in terms of the Unicode Property `White_Space`. 
#[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn is_whitespace(self) -> bool { match self { ' ' | '\x09' ... '\x0d' => true, c if c > '\x7f' => property::White_Space(c), _ => false } } /// Indicates whether a character is alphanumeric. /// /// Alphanumericness is defined in terms of the Unicode General Categories /// 'Nd', 'Nl', 'No' and the Derived Core Property 'Alphabetic'. #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn is_alphanumeric(self) -> bool { self.is_alphabetic() || self.is_numeric() } /// Indicates whether a character is a control code point. /// /// Control code points are defined in terms of the Unicode General /// Category `Cc`. #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn is_control(self) -> bool { general_category::Cc(self) } /// Indicates whether the character is numeric (Nd, Nl, or No). #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn is_numeric(self) -> bool { match self { '0' ... '9' => true, c if c > '\x7f' => general_category::N(c), _ => false } } /// Converts a character to its lowercase equivalent. /// /// The case-folding performed is the common or simple mapping. See /// `to_uppercase()` for references and more information. /// /// # Return value /// /// Returns an iterator which yields the characters corresponding to the /// lowercase equivalent of the character. If no conversion is possible then /// the input character is returned. #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn to_lowercase(self) -> ToLowercase { ToLowercase(Some(conversions::to_lower(self))) } /// Converts a character to its uppercase equivalent. /// /// The case-folding performed is the common or simple mapping: it maps /// one Unicode codepoint to its uppercase equivalent according to the /// Unicode database [1]. The additional [`SpecialCasing.txt`] is not yet /// considered here, but the iterator returned will soon support this form /// of case folding. /// /// A full reference can be found here [2]. /// /// # Return value /// /// Returns an iterator which yields the characters corresponding to the /// uppercase equivalent of the character. If no conversion is possible then /// the input character is returned. /// /// [1]: ftp://ftp.unicode.org/Public/UNIDATA/UnicodeData.txt /// /// [`SpecialCasing.txt`]: ftp://ftp.unicode.org/Public/UNIDATA/SpecialCasing.txt /// /// [2]: http://www.unicode.org/versions/Unicode4.0.0/ch03.pdf#G33992 #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn to_uppercase(self) -> ToUppercase { ToUppercase(Some(conversions::to_upper(self))) } /// Returns this character's displayed width in columns, or `None` if it is a /// control character other than `'\x00'`. /// /// `is_cjk` determines behavior for characters in the Ambiguous category: /// if `is_cjk` is `true`, these are 2 columns wide; otherwise, they are 1. /// In CJK contexts, `is_cjk` should be `true`, else it should be `false`. /// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/) /// recommends that these characters be treated as 1 column (i.e., /// `is_cjk` = `false`) if the context cannot be reliably determined. #[deprecated(reason = "use the crates.io `unicode-width` library instead", since = "1.0.0")] #[unstable(feature = "unicode", reason = "needs expert opinion. is_cjk flag stands out as ugly")] pub fn width(self, is_cjk: bool) -> Option<usize> { charwidth::width(self, is_cjk) } }<|fim▁end|>
<|file_name|>SystemStatusPage.java<|end_file_name|><|fim▁begin|>/* * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA 02110-1301, USA. * * For information about the authors of this project Have a look * at the AUTHORS file in the root of this project. */ package net.sourceforge.fullsync.ui; import java.util.Timer; import java.util.TimerTask; import javax.inject.Inject; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.ProgressBar; import org.eclipse.swt.widgets.Shell; class SystemStatusPage extends WizardDialog { private Label totalMemory; private Label maxMemory; private Label freeMemory; private ProgressBar progressBarMemory; private Timer timer; private Composite content; @Inject public SystemStatusPage(Shell shell) { super(shell); } @Override<|fim▁hole|> public String getTitle() { return Messages.getString("SystemStatusPage.Title"); //$NON-NLS-1$ } @Override public String getCaption() { return Messages.getString("SystemStatusPage.Caption"); //$NON-NLS-1$ } @Override public String getDescription() { return Messages.getString("SystemStatusPage.Description"); //$NON-NLS-1$ } @Override public String getIconName() { return null; } @Override public String getImageName() { return null; } @Override public void createContent(final Composite content) { this.content = content; // FIXME: add interesting versions and the system properties used by the launcher,... // TODO: add a way to report a bug here? 
try { content.setLayout(new GridLayout()); var groupMemory = new Group(content, SWT.NONE); groupMemory.setLayout(new GridLayout(2, false)); groupMemory.setText(Messages.getString("SystemStatusPage.JVMMemory")); //$NON-NLS-1$ progressBarMemory = new ProgressBar(groupMemory, SWT.NONE); var progressBarMemoryLData = new GridData(); progressBarMemoryLData.horizontalAlignment = SWT.FILL; progressBarMemoryLData.horizontalSpan = 2; progressBarMemory.setLayoutData(progressBarMemoryLData); // max memory var labelMaxMemory = new Label(groupMemory, SWT.NONE); labelMaxMemory.setText(Messages.getString("SystemStatusPage.MaxMemory")); //$NON-NLS-1$ maxMemory = new Label(groupMemory, SWT.RIGHT); var maxMemoryLData = new GridData(); maxMemoryLData.horizontalAlignment = SWT.FILL; maxMemory.setLayoutData(maxMemoryLData); // total memory var labelTotalMemory = new Label(groupMemory, SWT.NONE); labelTotalMemory.setText(Messages.getString("SystemStatusPage.TotalMemory")); //$NON-NLS-1$ totalMemory = new Label(groupMemory, SWT.RIGHT); var totalMemoryLData = new GridData(); totalMemoryLData.horizontalAlignment = SWT.FILL; totalMemory.setLayoutData(totalMemoryLData); // free memory var labelFreeMemory = new Label(groupMemory, SWT.NONE); labelFreeMemory.setText(Messages.getString("SystemStatusPage.FreeMemory")); //$NON-NLS-1$ freeMemory = new Label(groupMemory, SWT.RIGHT); freeMemory.setText(""); //$NON-NLS-1$ var freeMemoryLData = new GridData(); freeMemoryLData.horizontalAlignment = SWT.FILL; freeMemory.setLayoutData(freeMemoryLData); // gc button var buttonMemoryGc = new Button(groupMemory, SWT.PUSH | SWT.CENTER); buttonMemoryGc.setText(Messages.getString("SystemStatusPage.CleanUp")); //$NON-NLS-1$ var buttonMemoryGcLData = new GridData(); buttonMemoryGc.addListener(SWT.Selection, e -> System.gc()); buttonMemoryGcLData.horizontalAlignment = SWT.END; buttonMemoryGcLData.horizontalSpan = 2; buttonMemoryGc.setLayoutData(buttonMemoryGcLData); timerFired(); timer = new Timer(true); timer.schedule(new TimerTask() { @Override public void run() { timerFired(); } }, 1000, 1000); } catch (Exception e) { e.printStackTrace(); } } @Override public boolean apply() { return true; } @Override public boolean cancel() { return true; } private void timerFired() { if (!content.isDisposed()) { var display = getDisplay(); if ((null == display) || display.isDisposed()) { timer.cancel(); return; } display.asyncExec(this::updateView); } } private void updateView() { if (!content.isDisposed()) { var rt = Runtime.getRuntime(); var ltotalMemory = rt.totalMemory(); var lmaxMemory = rt.maxMemory(); var lfreeMemory = rt.freeMemory(); totalMemory.setText(UISettings.formatSize(ltotalMemory)); maxMemory.setText(UISettings.formatSize(lmaxMemory)); freeMemory.setText(UISettings.formatSize(lfreeMemory)); progressBarMemory.setMaximum((int) (ltotalMemory / 1024)); progressBarMemory.setSelection((int) ((ltotalMemory - lfreeMemory) / 1024)); content.layout(); } } @Override public void dispose() { timer.cancel(); super.dispose(); } }<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2019 Ruben Gonzalez <[email protected]> // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. <|fim▁hole|> mod imp; glib::glib_wrapper! { pub struct ClaxonDec(ObjectSubclass<imp::ClaxonDec>) @extends gst_audio::AudioDecoder, gst::Element, gst::Object; } unsafe impl Send for ClaxonDec {} unsafe impl Sync for ClaxonDec {} pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> { gst::Element::register( Some(plugin), "claxondec", gst::Rank::Marginal, ClaxonDec::static_type(), ) }<|fim▁end|>
use glib::prelude::*;
<|file_name|>LoadThumbnailTask.java<|end_file_name|><|fim▁begin|>package com.jasonsoft.softwarevideoplayer; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory.Options; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.net.Uri; import android.os.AsyncTask; import android.provider.MediaStore.Video; import android.provider.MediaStore.Video.Thumbnails; import android.view.View; import android.widget.ImageView; import com.jasonsoft.softwarevideoplayer.cache.CacheManager; import com.jasonsoft.softwarevideoplayer.data.AsyncDrawable; import com.jasonsoft.softwarevideoplayer.data.LoadThumbnailParams; import com.jasonsoft.softwarevideoplayer.data.LoadThumbnailResult; import java.lang.ref.WeakReference; public class LoadThumbnailTask extends AsyncTask<LoadThumbnailParams, Void, LoadThumbnailResult> { private Context mContext; private final WeakReference<ImageView> thumbnailViewReference; private long data = 0; public LoadThumbnailTask(Context context, ImageView thumbnailView) { this.mContext = context; this.thumbnailViewReference = new WeakReference<ImageView>(thumbnailView); } <|fim▁hole|> @Override protected LoadThumbnailResult doInBackground(LoadThumbnailParams... params) { data = params[0].origId; Bitmap bitmap = Thumbnails.getThumbnail(mContext.getContentResolver(), data, Thumbnails.MINI_KIND, new Options()); android.util.Log.d("jason", "doInBackground data:" + data); android.util.Log.d("jason", "doInBackground bitmap:" + bitmap); if (data > 0 && bitmap != null) { CacheManager.getInstance().addThumbnailToMemoryCache(String.valueOf(data), bitmap); } if (this.isCancelled()) { return null; } return new LoadThumbnailResult(bitmap); } @Override protected void onPostExecute(LoadThumbnailResult result) { if (isCancelled() || null == result) { return; } final ImageView thumbnailView = thumbnailViewReference.get(); final LoadThumbnailTask loadThumbnailTask = getLoadThumbnailTask(thumbnailView); if (this == loadThumbnailTask && thumbnailView != null) { setThumbnail(result.bitmap, thumbnailView); } } public long getData() { return data; } /** * @param imageView Any imageView * @return Retrieve the currently active work task (if any) associated with this imageView. * null if there is no such task. */ public static LoadThumbnailTask getLoadThumbnailTask(ImageView imageView) { if (imageView != null) { final Drawable drawable = imageView.getDrawable(); if (drawable instanceof AsyncDrawable) { final AsyncDrawable asyncDrawable = (AsyncDrawable) drawable; return asyncDrawable.getLoadThumbnailTask(); } } return null; } void setThumbnail(Bitmap bitmap, ImageView thumbnailView) { thumbnailView.setImageBitmap(bitmap); thumbnailView.setVisibility(View.VISIBLE); } }<|fim▁end|>
<|file_name|>cpower1200_rss.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """<|fim▁hole|>it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from cpower1200 import * import feedparser from sys import argv FEED = 'http://news.google.com.au/news?pz=1&cf=all&ned=au&hl=en&output=rss' d = feedparser.parse(FEED) s = CPower1200(argv[1]) # Define one window at the top of the screen, and one in the lower part of the screen s.send_window(dict(x=0, y=0, h=8, w=64), dict(x=0, y=8, h=8, w=64)) header = s.format_text(d.feed.title, RED, 0) articles = '' for i, article in enumerate(d.entries[:4]): print "entry %d: %s" % (i, article.title) colour = YELLOW if i % 2 == 0 else GREEN articles += s.format_text(article.title + ' ', colour) # send to sign #s.send_text(0, header, effect=EFFECT_NONE) s.send_clock(0, display_year=False, display_month=False, display_day=False, display_hour=True, display_minute=True, display_second=True, multiline=False, red=255,green=0,blue=0) s.send_text(1, articles, speed=10)<|fim▁end|>
RSS Reader for C-Power 1200 Copyright 2010-2012 Michael Farrell <http://micolous.id.au/> This library is free software: you can redistribute it and/or modify
<|file_name|>views.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*- from rest_framework import viewsets from . import serializers, models class FileViewSet(viewsets.ModelViewSet): queryset = models.File.objects.all() serializer_class = serializers.FileSerializer<|fim▁end|>
<|file_name|>tree.js<|end_file_name|><|fim▁begin|>module.exports = /******/ (function(modules) { // webpackBootstrap /******/ // The module cache /******/ var installedModules = {}; /******/ // The require function /******/ function __webpack_require__(moduleId) { /******/ // Check if module is in cache /******/ if(installedModules[moduleId]) /******/ return installedModules[moduleId].exports; /******/ // Create a new module (and put it into the cache) /******/ var module = installedModules[moduleId] = { /******/ exports: {}, /******/ id: moduleId, /******/ loaded: false /******/ }; /******/ // Execute the module function /******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); /******/ // Flag the module as loaded /******/ module.loaded = true; /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ // expose the modules object (__webpack_modules__) /******/ __webpack_require__.m = modules; /******/ // expose the module cache /******/ __webpack_require__.c = installedModules; /******/ // __webpack_public_path__ /******/ __webpack_require__.p = "/dist/"; /******/ // Load entry module and return exports /******/ return __webpack_require__(0); /******/ }) /************************************************************************/ /******/ ({ /***/ 0: /***/ function(module, exports, __webpack_require__) { module.exports = __webpack_require__(353); /***/ }, /***/ 3: /***/ function(module, exports) { /* globals __VUE_SSR_CONTEXT__ */ // this module is a runtime utility for cleaner component module output and will // be included in the final webpack user bundle module.exports = function normalizeComponent ( rawScriptExports, compiledTemplate, injectStyles, scopeId, moduleIdentifier /* server only */ ) { var esModule var scriptExports = rawScriptExports = rawScriptExports || {} // ES6 modules interop var type = typeof rawScriptExports.default if (type === 'object' || type === 'function') { esModule = rawScriptExports scriptExports = rawScriptExports.default } // Vue.extend constructor export interop var options = typeof scriptExports === 'function' ? scriptExports.options : scriptExports // render functions if (compiledTemplate) { options.render = compiledTemplate.render options.staticRenderFns = compiledTemplate.staticRenderFns } // scopedId if (scopeId) { options._scopeId = scopeId } var hook if (moduleIdentifier) { // server build hook = function (context) { // 2.3 injection context = context || (this.$vnode && this.$vnode.ssrContext) // 2.2 with runInNewContext: true if (!context && typeof __VUE_SSR_CONTEXT__ !== 'undefined') { context = __VUE_SSR_CONTEXT__ } // inject component styles if (injectStyles) { injectStyles.call(this, context) } // register component module identifier for async chunk inferrence if (context && context._registeredComponents) { context._registeredComponents.add(moduleIdentifier) } } // used by ssr in case component is cached and beforeCreate // never gets called options._ssrRegister = hook } else if (injectStyles) { hook = injectStyles } if (hook) { // inject component registration as beforeCreate hook var existing = options.beforeCreate options.beforeCreate = existing ? 
[].concat(existing, hook) : [hook] } return { esModule: esModule, exports: scriptExports, options: options } } /***/ }, /***/ 13: /***/ function(module, exports) { module.exports = require("element-ui/lib/mixins/emitter"); /***/ }, /***/ 61: /***/ function(module, exports) { module.exports = require("element-ui/lib/locale"); /***/ }, /***/ 85: /***/ function(module, exports) { module.exports = require("element-ui/lib/transitions/collapse-transition"); /***/ }, /***/ 169: /***/ function(module, exports) { module.exports = require("element-ui/lib/utils/merge"); /***/ }, /***/ 307: /***/ function(module, exports) { module.exports = require("element-ui/lib/checkbox"); /***/ }, /***/ 353: /***/ function(module, exports, __webpack_require__) { 'use strict'; exports.__esModule = true; var _tree = __webpack_require__(354); var _tree2 = _interopRequireDefault(_tree); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /* istanbul ignore next */ _tree2.default.install = function (Vue) { Vue.component(_tree2.default.name, _tree2.default); }; exports.default = _tree2.default; /***/ }, /***/ 354: /***/ function(module, exports, __webpack_require__) { var Component = __webpack_require__(3)( /* script */ __webpack_require__(355), /* template */ __webpack_require__(362), /* styles */ null, /* scopeId */ null, /* moduleIdentifier (server only) */ null ) module.exports = Component.exports /***/ }, /***/ 355: /***/ function(module, exports, __webpack_require__) { 'use strict'; exports.__esModule = true; var _treeStore = __webpack_require__(356); var _treeStore2 = _interopRequireDefault(_treeStore); var _locale = __webpack_require__(61); var _emitter = __webpack_require__(13); var _emitter2 = _interopRequireDefault(_emitter); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = { name: 'ElTree', mixins: [_emitter2.default], components: { ElTreeNode: __webpack_require__(359) }, data: function data() { return { store: null, root: null, currentNode: null }; }, props: { data: { type: Array }, emptyText: { type: String, default: function _default() { return (0, _locale.t)('el.tree.emptyText'); } }, nodeKey: String, checkStrictly: Boolean, defaultExpandAll: Boolean, expandOnClickNode: { type: Boolean, default: true }, autoExpandParent: { type: Boolean, default: true }, defaultCheckedKeys: Array, defaultExpandedKeys: Array, renderContent: Function, showCheckbox: { type: Boolean, default: false }, props: { default: function _default() { return { children: 'children', label: 'label', icon: 'icon' }; } }, lazy: { type: Boolean, default: false }, highlightCurrent: Boolean, currentNodeKey: [String, Number], load: Function, filterNodeMethod: Function, accordion: Boolean, indent: { type: Number, default: 16 } }, computed: { children: { set: function set(value) { this.data = value; }, get: function get() { return this.data; } } }, watch: { defaultCheckedKeys: function defaultCheckedKeys(newVal) { this.store.defaultCheckedKeys = newVal; this.store.setDefaultCheckedKey(newVal); }, defaultExpandedKeys: function defaultExpandedKeys(newVal) { this.store.defaultExpandedKeys = newVal; this.store.setDefaultExpandedKeys(newVal); }, currentNodeKey: function currentNodeKey(newVal) { this.store.setCurrentNodeKey(newVal); this.store.currentNodeKey = newVal; }, data: function data(newVal) { this.store.setData(newVal); } }, methods: { filter: function filter(value) { if (!this.filterNodeMethod) throw new Error('[Tree] filterNodeMethod is required when filter'); this.store.filter(value); }, getNodeKey: function getNodeKey(node, index) { var nodeKey = this.nodeKey; if (nodeKey && node) { return node.data[nodeKey]; } return index; }, getCheckedNodes: function getCheckedNodes(leafOnly) { return this.store.getCheckedNodes(leafOnly); }, getCheckedKeys: function getCheckedKeys(leafOnly) { return this.store.getCheckedKeys(leafOnly); }, setCheckedNodes: function setCheckedNodes(nodes, leafOnly) { if (!this.nodeKey) throw new Error('[Tree] nodeKey is required in setCheckedNodes'); this.store.setCheckedNodes(nodes, leafOnly); }, setCheckedKeys: function setCheckedKeys(keys, leafOnly) { if (!this.nodeKey) throw new Error('[Tree] nodeKey is required in setCheckedNodes'); this.store.setCheckedKeys(keys, leafOnly); }, setChecked: function setChecked(data, checked, deep) { this.store.setChecked(data, checked, deep); }, handleNodeExpand: function handleNodeExpand(nodeData, node, instance) { this.broadcast('ElTreeNode', 'tree-node-expand', node); this.$emit('node-expand', nodeData, node, instance); } }, created: function created() { this.isTree = true; this.store = new _treeStore2.default({ key: this.nodeKey, data: this.data, lazy: this.lazy, props: this.props, load: this.load, currentNodeKey: this.currentNodeKey, checkStrictly: this.checkStrictly, defaultCheckedKeys: this.defaultCheckedKeys, defaultExpandedKeys: this.defaultExpandedKeys, autoExpandParent: this.autoExpandParent, defaultExpandAll: this.defaultExpandAll, filterNodeMethod: this.filterNodeMethod }); <|fim▁hole|> // // // // // // // // // // // // // // // /***/ }, /***/ 356: /***/ function(module, exports, __webpack_require__) { 'use strict'; exports.__esModule = true; var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? 
function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; var _node = __webpack_require__(357); var _node2 = _interopRequireDefault(_node); var _util = __webpack_require__(358); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var TreeStore = function () { function TreeStore(options) { var _this = this; _classCallCheck(this, TreeStore); this.currentNode = null; this.currentNodeKey = null; for (var option in options) { if (options.hasOwnProperty(option)) { this[option] = options[option]; } } this.nodesMap = {}; this.root = new _node2.default({ data: this.data, store: this }); if (this.lazy && this.load) { var loadFn = this.load; loadFn(this.root, function (data) { _this.root.doCreateChildren(data); _this._initDefaultCheckedNodes(); }); } else { this._initDefaultCheckedNodes(); } } TreeStore.prototype.filter = function filter(value) { var filterNodeMethod = this.filterNodeMethod; var traverse = function traverse(node) { var childNodes = node.root ? node.root.childNodes : node.childNodes; childNodes.forEach(function (child) { child.visible = filterNodeMethod.call(child, value, child.data, child); traverse(child); }); if (!node.visible && childNodes.length) { var allHidden = true; childNodes.forEach(function (child) { if (child.visible) allHidden = false; }); if (node.root) { node.root.visible = allHidden === false; } else { node.visible = allHidden === false; } } if (node.visible && !node.isLeaf) node.expand(); }; traverse(this); }; TreeStore.prototype.setData = function setData(newVal) { var instanceChanged = newVal !== this.root.data; this.root.setData(newVal); if (instanceChanged) { this._initDefaultCheckedNodes(); } }; TreeStore.prototype.getNode = function getNode(data) { var key = (typeof data === 'undefined' ? 'undefined' : _typeof(data)) !== 'object' ? data : (0, _util.getNodeKey)(this.key, data); return this.nodesMap[key]; }; TreeStore.prototype.insertBefore = function insertBefore(data, refData) { var refNode = this.getNode(refData); refNode.parent.insertBefore({ data: data }, refNode); }; TreeStore.prototype.insertAfter = function insertAfter(data, refData) { var refNode = this.getNode(refData); refNode.parent.insertAfter({ data: data }, refNode); }; TreeStore.prototype.remove = function remove(data) { var node = this.getNode(data); if (node) { node.parent.removeChild(node); } }; TreeStore.prototype.append = function append(data, parentData) { var parentNode = parentData ? 
this.getNode(parentData) : this.root; if (parentNode) { parentNode.insertChild({ data: data }); } }; TreeStore.prototype._initDefaultCheckedNodes = function _initDefaultCheckedNodes() { var _this2 = this; var defaultCheckedKeys = this.defaultCheckedKeys || []; var nodesMap = this.nodesMap; defaultCheckedKeys.forEach(function (checkedKey) { var node = nodesMap[checkedKey]; if (node) { node.setChecked(true, !_this2.checkStrictly); } }); }; TreeStore.prototype._initDefaultCheckedNode = function _initDefaultCheckedNode(node) { var defaultCheckedKeys = this.defaultCheckedKeys || []; if (defaultCheckedKeys.indexOf(node.key) !== -1) { node.setChecked(true, !this.checkStrictly); } }; TreeStore.prototype.setDefaultCheckedKey = function setDefaultCheckedKey(newVal) { if (newVal !== this.defaultCheckedKeys) { this.defaultCheckedKeys = newVal; this._initDefaultCheckedNodes(); } }; TreeStore.prototype.registerNode = function registerNode(node) { var key = this.key; if (!key || !node || !node.data) return; var nodeKey = node.key; if (nodeKey !== undefined) this.nodesMap[node.key] = node; }; TreeStore.prototype.deregisterNode = function deregisterNode(node) { var key = this.key; if (!key || !node || !node.data) return; delete this.nodesMap[node.key]; }; TreeStore.prototype.getCheckedNodes = function getCheckedNodes() { var leafOnly = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; var checkedNodes = []; var traverse = function traverse(node) { var childNodes = node.root ? node.root.childNodes : node.childNodes; childNodes.forEach(function (child) { if (!leafOnly && child.checked || leafOnly && child.isLeaf && child.checked) { checkedNodes.push(child.data); } traverse(child); }); }; traverse(this); return checkedNodes; }; TreeStore.prototype.getCheckedKeys = function getCheckedKeys() { var leafOnly = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; var key = this.key; var allNodes = this._getAllNodes(); var keys = []; allNodes.forEach(function (node) { if (!leafOnly || leafOnly && node.isLeaf) { if (node.checked) { keys.push((node.data || {})[key]); } } }); return keys; }; TreeStore.prototype._getAllNodes = function _getAllNodes() { var allNodes = []; var nodesMap = this.nodesMap; for (var nodeKey in nodesMap) { if (nodesMap.hasOwnProperty(nodeKey)) { allNodes.push(nodesMap[nodeKey]); } } return allNodes; }; TreeStore.prototype._setCheckedKeys = function _setCheckedKeys(key) { var _this3 = this; var leafOnly = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; var checkedKeys = arguments[2]; var allNodes = this._getAllNodes(); allNodes.sort(function (a, b) { return b.level - a.level; }); var keys = Object.keys(checkedKeys); allNodes.forEach(function (node) { var checked = keys.indexOf(node.data[key] + '') > -1; if (!node.isLeaf) { if (!_this3.checkStrictly) { var childNodes = node.childNodes; var all = true; var none = true; for (var i = 0, j = childNodes.length; i < j; i++) { var child = childNodes[i]; if (child.checked !== true || child.indeterminate) { all = false; } if (child.checked !== false || child.indeterminate) { none = false; } } if (all) { node.setChecked(true, !_this3.checkStrictly); } else if (!all && !none) { checked = checked ? 
true : 'half'; node.setChecked(checked, !_this3.checkStrictly && checked === true); } else if (none) { node.setChecked(checked, !_this3.checkStrictly); } } else { node.setChecked(checked, false); } if (leafOnly) { (function () { node.setChecked(false, false); var traverse = function traverse(node) { var childNodes = node.childNodes; childNodes.forEach(function (child) { if (!child.isLeaf) { child.setChecked(false, false); } traverse(child); }); }; traverse(node); })(); } } else { node.setChecked(checked, false); } }); }; TreeStore.prototype.setCheckedNodes = function setCheckedNodes(array) { var leafOnly = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; var key = this.key; var checkedKeys = {}; array.forEach(function (item) { checkedKeys[(item || {})[key]] = true; }); this._setCheckedKeys(key, leafOnly, checkedKeys); }; TreeStore.prototype.setCheckedKeys = function setCheckedKeys(keys) { var leafOnly = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false; this.defaultCheckedKeys = keys; var key = this.key; var checkedKeys = {}; keys.forEach(function (key) { checkedKeys[key] = true; }); this._setCheckedKeys(key, leafOnly, checkedKeys); }; TreeStore.prototype.setDefaultExpandedKeys = function setDefaultExpandedKeys(keys) { var _this4 = this; keys = keys || []; this.defaultExpandedKeys = keys; keys.forEach(function (key) { var node = _this4.getNode(key); if (node) node.expand(null, _this4.autoExpandParent); }); }; TreeStore.prototype.setChecked = function setChecked(data, checked, deep) { var node = this.getNode(data); if (node) { node.setChecked(!!checked, deep); } }; TreeStore.prototype.getCurrentNode = function getCurrentNode() { return this.currentNode; }; TreeStore.prototype.setCurrentNode = function setCurrentNode(node) { this.currentNode = node; }; TreeStore.prototype.setCurrentNodeKey = function setCurrentNodeKey(key) { var node = this.getNode(key); if (node) { this.currentNode = node; } }; return TreeStore; }(); exports.default = TreeStore; ; /***/ }, /***/ 357: /***/ function(module, exports, __webpack_require__) { 'use strict'; exports.__esModule = true; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _merge = __webpack_require__(169); var _merge2 = _interopRequireDefault(_merge); var _util = __webpack_require__(358); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } var reInitChecked = function reInitChecked(node) { var siblings = node.childNodes; var all = true; var none = true; for (var i = 0, j = siblings.length; i < j; i++) { var sibling = siblings[i]; if (sibling.checked !== true || sibling.indeterminate) { all = false; } if (sibling.checked !== false || sibling.indeterminate) { none = false; } } if (all) { node.setChecked(true); } else if (!all && !none) { node.setChecked('half'); } else if (none) { node.setChecked(false); } }; var getPropertyFromData = function getPropertyFromData(node, prop) { var props = node.store.props; var data = node.data || {}; var config = props[prop]; if (typeof config === 'function') { return config(data, node); } else if (typeof config === 'string') { return data[config]; } else if (typeof config === 'undefined') { return ''; } }; var nodeIdSeed = 0; var Node = function () { function Node(options) { _classCallCheck(this, Node); this.id = nodeIdSeed++; this.text = null; this.checked = false; this.indeterminate = false; this.data = null; this.expanded = false; this.parent = null; this.visible = true; for (var name in options) { if (options.hasOwnProperty(name)) { this[name] = options[name]; } } // internal this.level = 0; this.loaded = false; this.childNodes = []; this.loading = false; if (this.parent) { this.level = this.parent.level + 1; } var store = this.store; if (!store) { throw new Error('[Node]store is required!'); } store.registerNode(this); var props = store.props; if (props && typeof props.isLeaf !== 'undefined') { var isLeaf = getPropertyFromData(this, 'isLeaf'); if (typeof isLeaf === 'boolean') { this.isLeafByUser = isLeaf; } } if (store.lazy !== true && this.data) { this.setData(this.data); if (store.defaultExpandAll) { this.expanded = true; } } else if (this.level > 0 && store.lazy && store.defaultExpandAll) { this.expand(); } if (!this.data) return; var defaultExpandedKeys = store.defaultExpandedKeys; var key = store.key; if (key && defaultExpandedKeys && defaultExpandedKeys.indexOf(this.key) !== -1) { this.expand(null, store.autoExpandParent); } if (key && store.currentNodeKey && this.key === store.currentNodeKey) { store.currentNode = this; } if (store.lazy) { store._initDefaultCheckedNode(this); } this.updateLeafState(); } Node.prototype.setData = function setData(data) { if (!Array.isArray(data)) { (0, _util.markNodeData)(this, data); } this.data = data; this.childNodes = []; var children = void 0; if (this.level === 0 && this.data instanceof Array) { children = this.data; } else { children = getPropertyFromData(this, 'children') || []; } for (var i = 0, j = children.length; i < j; i++) { this.insertChild({ data: children[i] }); } }; Node.prototype.insertChild = function insertChild(child, index) { if (!child) throw new Error('insertChild error: child is required.'); if (!(child instanceof Node)) { (0, _merge2.default)(child, { parent: this, store: this.store }); child = new Node(child); } child.level = this.level + 1; if (typeof index === 'undefined' || index < 0) { this.childNodes.push(child); } else { this.childNodes.splice(index, 0, child); } this.updateLeafState(); }; Node.prototype.insertBefore = function insertBefore(child, ref) { var index = void 0; if (ref) { index = this.childNodes.indexOf(ref); } this.insertChild(child, index); }; Node.prototype.insertAfter = function insertAfter(child, ref) { var index = void 
0; if (ref) { index = this.childNodes.indexOf(ref); if (index !== -1) index += 1; } this.insertChild(child, index); }; Node.prototype.removeChild = function removeChild(child) { var index = this.childNodes.indexOf(child); if (index > -1) { this.store && this.store.deregisterNode(child); child.parent = null; this.childNodes.splice(index, 1); } this.updateLeafState(); }; Node.prototype.removeChildByData = function removeChildByData(data) { var targetNode = null; this.childNodes.forEach(function (node) { if (node.data === data) { targetNode = node; } }); if (targetNode) { this.removeChild(targetNode); } }; Node.prototype.expand = function expand(callback, expandParent) { var _this = this; var done = function done() { if (expandParent) { var parent = _this.parent; while (parent.level > 0) { parent.expanded = true; parent = parent.parent; } } _this.expanded = true; if (callback) callback(); }; if (this.shouldLoadData()) { this.loadData(function (data) { if (data instanceof Array) { done(); } }); } else { done(); } }; Node.prototype.doCreateChildren = function doCreateChildren(array) { var _this2 = this; var defaultProps = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; array.forEach(function (item) { _this2.insertChild((0, _merge2.default)({ data: item }, defaultProps)); }); }; Node.prototype.collapse = function collapse() { this.expanded = false; }; Node.prototype.shouldLoadData = function shouldLoadData() { return this.store.lazy === true && this.store.load && !this.loaded; }; Node.prototype.updateLeafState = function updateLeafState() { if (this.store.lazy === true && this.loaded !== true && typeof this.isLeafByUser !== 'undefined') { this.isLeaf = this.isLeafByUser; return; } var childNodes = this.childNodes; if (!this.store.lazy || this.store.lazy === true && this.loaded === true) { this.isLeaf = !childNodes || childNodes.length === 0; return; } this.isLeaf = false; }; Node.prototype.setChecked = function setChecked(value, deep) { var _this3 = this; this.indeterminate = value === 'half'; this.checked = value === true; var handleDescendants = function handleDescendants() { if (deep) { var childNodes = _this3.childNodes; for (var i = 0, j = childNodes.length; i < j; i++) { var child = childNodes[i]; child.setChecked(value !== false, deep); } } }; if (!this.store.checkStrictly && this.shouldLoadData()) { // Only work on lazy load data. 
this.loadData(function () { handleDescendants(); }, { checked: value !== false }); } else { handleDescendants(); } var parent = this.parent; if (!parent || parent.level === 0) return; if (!this.store.checkStrictly) { reInitChecked(parent); } }; Node.prototype.getChildren = function getChildren() { // this is data var data = this.data; if (!data) return null; var props = this.store.props; var children = 'children'; if (props) { children = props.children || 'children'; } if (data[children] === undefined) { data[children] = null; } return data[children]; }; Node.prototype.updateChildren = function updateChildren() { var _this4 = this; var newData = this.getChildren() || []; var oldData = this.childNodes.map(function (node) { return node.data; }); var newDataMap = {}; var newNodes = []; newData.forEach(function (item, index) { if (item[_util.NODE_KEY]) { newDataMap[item[_util.NODE_KEY]] = { index: index, data: item }; } else { newNodes.push({ index: index, data: item }); } }); oldData.forEach(function (item) { if (!newDataMap[item[_util.NODE_KEY]]) _this4.removeChildByData(item); }); newNodes.forEach(function (_ref) { var index = _ref.index, data = _ref.data; _this4.insertChild({ data: data }, index); }); this.updateLeafState(); }; Node.prototype.loadData = function loadData(callback) { var _this5 = this; var defaultProps = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; if (this.store.lazy === true && this.store.load && !this.loaded && !this.loading) { this.loading = true; var resolve = function resolve(children) { _this5.loaded = true; _this5.loading = false; _this5.childNodes = []; _this5.doCreateChildren(children, defaultProps); _this5.updateLeafState(); if (callback) { callback.call(_this5, children); } }; this.store.load(this, resolve); } else { if (callback) { callback.call(this); } } }; _createClass(Node, [{ key: 'label', get: function get() { return getPropertyFromData(this, 'label'); } }, { key: 'icon', get: function get() { return getPropertyFromData(this, 'icon'); } }, { key: 'key', get: function get() { var nodeKey = this.store.key; if (this.data) return this.data[nodeKey]; return null; } }]); return Node; }(); exports.default = Node; /***/ }, /***/ 358: /***/ function(module, exports) { 'use strict'; exports.__esModule = true; var NODE_KEY = exports.NODE_KEY = '$treeNodeId'; var markNodeData = exports.markNodeData = function markNodeData(node, data) { if (data[NODE_KEY]) return; Object.defineProperty(data, NODE_KEY, { value: node.id, enumerable: false, configurable: false, writable: false }); }; var getNodeKey = exports.getNodeKey = function getNodeKey(key, data) { if (!key) return data[NODE_KEY]; return data[key]; }; /***/ }, /***/ 359: /***/ function(module, exports, __webpack_require__) { var Component = __webpack_require__(3)( /* script */ __webpack_require__(360), /* template */ __webpack_require__(361), /* styles */ null, /* scopeId */ null, /* moduleIdentifier (server only) */ null ) module.exports = Component.exports /***/ }, /***/ 360: /***/ function(module, exports, __webpack_require__) { 'use strict'; exports.__esModule = true; var _collapseTransition = __webpack_require__(85); var _collapseTransition2 = _interopRequireDefault(_collapseTransition); var _checkbox = __webpack_require__(307); var _checkbox2 = _interopRequireDefault(_checkbox); var _emitter = __webpack_require__(13); var _emitter2 = _interopRequireDefault(_emitter); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } exports.default = { name: 'ElTreeNode', componentName: 'ElTreeNode', mixins: [_emitter2.default], props: { node: { default: function _default() { return {}; } }, props: {}, renderContent: Function }, components: { ElCollapseTransition: _collapseTransition2.default, ElCheckbox: _checkbox2.default, NodeContent: { props: { node: { required: true } }, render: function render(h) { var parent = this.$parent; var node = this.node; var data = node.data; var store = node.store; return parent.renderContent ? parent.renderContent.call(parent._renderProxy, h, { _self: parent.tree.$vnode.context, node: node, data: data, store: store }) : h( 'span', { 'class': 'el-tree-node__label' }, [this.node.label] ); } } }, data: function data() { return { tree: null, expanded: false, childNodeRendered: false, showCheckbox: false, oldChecked: null, oldIndeterminate: null }; }, watch: { 'node.indeterminate': function nodeIndeterminate(val) { this.handleSelectChange(this.node.checked, val); }, 'node.checked': function nodeChecked(val) { this.handleSelectChange(val, this.node.indeterminate); }, 'node.expanded': function nodeExpanded(val) { this.expanded = val; if (val) { this.childNodeRendered = true; } } }, methods: { getNodeKey: function getNodeKey(node, index) { var nodeKey = this.tree.nodeKey; if (nodeKey && node) { return node.data[nodeKey]; } return index; }, handleSelectChange: function handleSelectChange(checked, indeterminate) { if (this.oldChecked !== checked && this.oldIndeterminate !== indeterminate) { this.tree.$emit('check-change', this.node.data, checked, indeterminate); } this.oldChecked = checked; this.indeterminate = indeterminate; }, handleClick: function handleClick() { var store = this.tree.store; store.setCurrentNode(this.node); this.tree.$emit('current-change', store.currentNode ? store.currentNode.data : null, store.currentNode); this.tree.currentNode = this; if (this.tree.expandOnClickNode) { this.handleExpandIconClick(); } this.tree.$emit('node-click', this.node.data, this.node, this); }, handleExpandIconClick: function handleExpandIconClick() { if (this.node.isLeaf) return; if (this.expanded) { this.tree.$emit('node-collapse', this.node.data, this.node, this); this.node.collapse(); } else { this.node.expand(); this.$emit('node-expand', this.node.data, this.node, this); } }, handleUserClick: function handleUserClick() { if (this.node.indeterminate) { this.node.setChecked(this.node.checked, !this.tree.checkStrictly); } }, handleCheckChange: function handleCheckChange(ev) { if (!this.node.indeterminate) { this.node.setChecked(ev.target.checked, !this.tree.checkStrictly); } }, handleChildNodeExpand: function handleChildNodeExpand(nodeData, node, instance) { this.broadcast('ElTreeNode', 'tree-node-expand', node); this.tree.$emit('node-expand', nodeData, node, instance); } }, created: function created() { var _this = this; var parent = this.$parent; if (parent.isTree) { this.tree = parent; } else { this.tree = parent.tree; } var tree = this.tree; if (!tree) { console.warn('Can not find node\'s tree.'); } var props = tree.props || {}; var childrenKey = props['children'] || 'children'; this.$watch('node.data.' 
+ childrenKey, function () { _this.node.updateChildren(); }); this.showCheckbox = tree.showCheckbox; if (this.node.expanded) { this.expanded = true; this.childNodeRendered = true; } if (this.tree.accordion) { this.$on('tree-node-expand', function (node) { if (_this.node !== node) { _this.node.collapse(); } }); } } }; // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // // /***/ }, /***/ 361: /***/ function(module, exports) { module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h; return _c('div', { directives: [{ name: "show", rawName: "v-show", value: (_vm.node.visible), expression: "node.visible" }], staticClass: "el-tree-node", class: { 'is-expanded': _vm.childNodeRendered && _vm.expanded, 'is-current': _vm.tree.store.currentNode === _vm.node, 'is-hidden': !_vm.node.visible }, on: { "click": function($event) { $event.stopPropagation(); _vm.handleClick($event) } } }, [_c('div', { staticClass: "el-tree-node__content", style: ({ 'padding-left': (_vm.node.level - 1) * _vm.tree.indent + 'px' }) }, [_c('span', { staticClass: "el-tree-node__expand-icon", class: { 'is-leaf': _vm.node.isLeaf, expanded: !_vm.node.isLeaf && _vm.expanded }, on: { "click": function($event) { $event.stopPropagation(); _vm.handleExpandIconClick($event) } } }), (_vm.showCheckbox) ? _c('el-checkbox', { attrs: { "indeterminate": _vm.node.indeterminate }, on: { "change": _vm.handleCheckChange }, nativeOn: { "click": function($event) { $event.stopPropagation(); _vm.handleUserClick($event) } }, model: { value: (_vm.node.checked), callback: function($$v) { _vm.node.checked = $$v }, expression: "node.checked" } }) : _vm._e(), (_vm.node.loading) ? _c('span', { staticClass: "el-tree-node__loading-icon el-icon-loading" }) : _vm._e(), _c('node-content', { attrs: { "node": _vm.node } })], 1), _c('el-collapse-transition', [_c('div', { directives: [{ name: "show", rawName: "v-show", value: (_vm.expanded), expression: "expanded" }], staticClass: "el-tree-node__children" }, _vm._l((_vm.node.childNodes), function(child) { return _c('el-tree-node', { key: _vm.getNodeKey(child), attrs: { "render-content": _vm.renderContent, "node": child }, on: { "node-expand": _vm.handleChildNodeExpand } }) }))])], 1) },staticRenderFns: []} /***/ }, /***/ 362: /***/ function(module, exports) { module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h; return _c('div', { staticClass: "el-tree", class: { 'el-tree--highlight-current': _vm.highlightCurrent } }, [_vm._l((_vm.root.childNodes), function(child) { return _c('el-tree-node', { key: _vm.getNodeKey(child), attrs: { "node": child, "props": _vm.props, "render-content": _vm.renderContent }, on: { "node-expand": _vm.handleNodeExpand } }) }), (!_vm.root.childNodes || _vm.root.childNodes.length === 0) ? _c('div', { staticClass: "el-tree__empty-block" }, [_c('span', { staticClass: "el-tree__empty-text" }, [_vm._v(_vm._s(_vm.emptyText))])]) : _vm._e()], 2) },staticRenderFns: []} /***/ } /******/ });<|fim▁end|>
this.root = this.store.root; } }; //
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 mod aliases; pub mod ast; mod byte_string; mod hex_string;<|fim▁hole|><|fim▁end|>
pub(crate) mod translate;
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export * from './TestClass';
<|file_name|>test_patroni.py<|end_file_name|><|fim▁begin|>import etcd import logging import os import signal import time import unittest import patroni.config as config from mock import Mock, PropertyMock, patch from patroni.api import RestApiServer from patroni.async_executor import AsyncExecutor from patroni.dcs.etcd import AbstractEtcdClientWithFailover from patroni.exceptions import DCSError from patroni.postgresql import Postgresql from patroni.postgresql.config import ConfigHandler from patroni import check_psycopg from patroni.__main__ import Patroni, main as _main, patroni_main from six.moves import BaseHTTPServer, builtins from threading import Thread from . import psycopg_connect, SleepException from .test_etcd import etcd_read, etcd_write from .test_postgresql import MockPostmaster def mock_import(*args, **kwargs): if args[0] == 'psycopg': raise ImportError ret = Mock() ret.__version__ = '2.5.3.dev1 a b c' return ret class MockFrozenImporter(object): toc = set(['patroni.dcs.etcd']) @patch('time.sleep', Mock()) @patch('subprocess.call', Mock(return_value=0)) @patch('patroni.psycopg.connect', psycopg_connect) @patch.object(ConfigHandler, 'append_pg_hba', Mock()) @patch.object(ConfigHandler, 'write_postgresql_conf', Mock()) @patch.object(ConfigHandler, 'write_recovery_conf', Mock()) @patch.object(Postgresql, 'is_running', Mock(return_value=MockPostmaster())) @patch.object(Postgresql, 'call_nowait', Mock()) @patch.object(BaseHTTPServer.HTTPServer, '__init__', Mock()) @patch.object(AsyncExecutor, 'run', Mock()) @patch.object(etcd.Client, 'write', etcd_write) @patch.object(etcd.Client, 'read', etcd_read) class TestPatroni(unittest.TestCase): def test_no_config(self): self.assertRaises(SystemExit, patroni_main) @patch('sys.argv', ['patroni.py', '--validate-config', 'postgres0.yml']) def test_validate_config(self): self.assertRaises(SystemExit, patroni_main) @patch('pkgutil.iter_importers', Mock(return_value=[MockFrozenImporter()])) @patch('sys.frozen', Mock(return_value=True), create=True) @patch.object(BaseHTTPServer.HTTPServer, '__init__', Mock()) @patch.object(etcd.Client, 'read', etcd_read) @patch.object(Thread, 'start', Mock()) @patch.object(AbstractEtcdClientWithFailover, 'machines', PropertyMock(return_value=['http://remotehost:2379'])) def setUp(self): self._handlers = logging.getLogger().handlers[:] RestApiServer._BaseServer__is_shut_down = Mock() RestApiServer._BaseServer__shutdown_request = True RestApiServer.socket = 0 os.environ['PATRONI_POSTGRESQL_DATA_DIR'] = 'data/test0' conf = config.Config('postgres0.yml') self.p = Patroni(conf) def tearDown(self): logging.getLogger().handlers[:] = self._handlers @patch('patroni.dcs.AbstractDCS.get_cluster', Mock(side_effect=[None, DCSError('foo'), None])) def test_load_dynamic_configuration(self): self.p.config._dynamic_configuration = {} self.p.load_dynamic_configuration() self.p.load_dynamic_configuration() @patch('sys.argv', ['patroni.py', 'postgres0.yml']) @patch('time.sleep', Mock(side_effect=SleepException)) @patch.object(etcd.Client, 'delete', Mock()) @patch.object(AbstractEtcdClientWithFailover, 'machines', PropertyMock(return_value=['http://remotehost:2379'])) @patch.object(Thread, 'join', Mock()) def test_patroni_patroni_main(self): with patch('subprocess.call', Mock(return_value=1)): with patch.object(Patroni, 'run', Mock(side_effect=SleepException)): os.environ['PATRONI_POSTGRESQL_DATA_DIR'] = 'data/test0' self.assertRaises(SleepException, patroni_main) with patch.object(Patroni, 'run', 
Mock(side_effect=KeyboardInterrupt())): with patch('patroni.ha.Ha.is_paused', Mock(return_value=True)): os.environ['PATRONI_POSTGRESQL_DATA_DIR'] = 'data/test0' patroni_main() @patch('os.getpid') @patch('multiprocessing.Process') @patch('patroni.__main__.patroni_main', Mock()) def test_patroni_main(self, mock_process, mock_getpid): mock_getpid.return_value = 2 _main() mock_getpid.return_value = 1 def mock_signal(signo, handler): handler(signo, None) with patch('signal.signal', mock_signal): with patch('os.waitpid', Mock(side_effect=[(1, 0), (0, 0)])): _main() with patch('os.waitpid', Mock(side_effect=OSError)): _main() ref = {'passtochild': lambda signo, stack_frame: 0} def mock_sighup(signo, handler): if hasattr(signal, 'SIGHUP') and signo == signal.SIGHUP: ref['passtochild'] = handler def mock_join(): ref['passtochild'](0, None) mock_process.return_value.join = mock_join with patch('signal.signal', mock_sighup), patch('os.kill', Mock()): self.assertIsNone(_main()) @patch('patroni.config.Config.save_cache', Mock()) @patch('patroni.config.Config.reload_local_configuration', Mock(return_value=True)) @patch('patroni.ha.Ha.is_leader', Mock(return_value=True)) @patch.object(Postgresql, 'state', PropertyMock(return_value='running')) @patch.object(Postgresql, 'data_directory_empty', Mock(return_value=False)) def test_run(self): self.p.postgresql.set_role('replica') self.p.sighup_handler() self.p.ha.dcs.watch = Mock(side_effect=SleepException) self.p.api.start = Mock() self.p.logger.start = Mock() self.p.config._dynamic_configuration = {} self.assertRaises(SleepException, self.p.run) with patch('patroni.config.Config.reload_local_configuration', Mock(return_value=False)): self.p.sighup_handler() self.assertRaises(SleepException, self.p.run) with patch('patroni.config.Config.set_dynamic_configuration', Mock(return_value=True)): self.assertRaises(SleepException, self.p.run) with patch('patroni.postgresql.Postgresql.data_directory_empty', Mock(return_value=False)): self.assertRaises(SleepException, self.p.run) def test_sigterm_handler(self): self.assertRaises(SystemExit, self.p.sigterm_handler) def test_schedule_next_run(self): self.p.ha.cluster = Mock() self.p.ha.dcs.watch = Mock(return_value=True) self.p.schedule_next_run() self.p.next_run = time.time() - self.p.dcs.loop_wait - 1 self.p.schedule_next_run() def test_noloadbalance(self): self.p.tags['noloadbalance'] = True self.assertTrue(self.p.noloadbalance) def test_nofailover(self): self.p.tags['nofailover'] = True self.assertTrue(self.p.nofailover) self.p.tags['nofailover'] = None self.assertFalse(self.p.nofailover) def test_replicatefrom(self): self.assertIsNone(self.p.replicatefrom) self.p.tags['replicatefrom'] = 'foo' self.assertEqual(self.p.replicatefrom, 'foo') def test_reload_config(self): self.p.reload_config() self.p.get_tags = Mock(side_effect=Exception) self.p.reload_config(local=True) def test_nosync(self): self.p.tags['nosync'] = True self.assertTrue(self.p.nosync) self.p.tags['nosync'] = None self.assertFalse(self.p.nosync) @patch.object(Thread, 'join', Mock())<|fim▁hole|> def test_check_psycopg(self): with patch.object(builtins, '__import__', Mock(side_effect=ImportError)): self.assertRaises(SystemExit, check_psycopg) with patch.object(builtins, '__import__', mock_import): self.assertRaises(SystemExit, check_psycopg)<|fim▁end|>
def test_shutdown(self): self.p.api.shutdown = Mock(side_effect=Exception) self.p.ha.shutdown = Mock(side_effect=Exception) self.p.shutdown()
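A pattern worth noting in the patroni tests above: Patroni.run is an infinite loop, so the suite patches time.sleep (or dcs.watch) with Mock(side_effect=SleepException) and asserts that the exception escapes, proving exactly one iteration ran before the loop was broken. A stripped-down sketch of the same trick, with run_loop as a hypothetical stand-in for Patroni.run:

# Breaking an infinite service loop in a unit test; SleepException mirrors
# the helper the suite imports from its own test package.
import time
import unittest
from unittest.mock import Mock, patch

class SleepException(Exception):
    pass  # raised by the patched sleep to escape the loop

def run_loop():
    while True:         # never returns in production
        time.sleep(10)  # patched in the test below

class LoopTest(unittest.TestCase):
    @patch('time.sleep', Mock(side_effect=SleepException))
    def test_run_exits(self):
        self.assertRaises(SleepException, run_loop)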
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#![cfg(target_os = "android")] use crate::api::egl::{ Context as EglContext, NativeDisplay, SurfaceType as EglSurfaceType, }; use crate::CreationError::{self, OsError}; use crate::{ Api, ContextError, GlAttributes, PixelFormat, PixelFormatRequirements, Rect, }; use crate::platform::android::EventLoopExtAndroid; use glutin_egl_sys as ffi; use parking_lot::Mutex; use winit; use winit::dpi; use winit::event_loop::EventLoopWindowTarget; use winit::window::WindowBuilder; use std::sync::Arc; #[derive(Debug)] struct AndroidContext { egl_context: EglContext, stopped: Option<Mutex<bool>>, } #[derive(Debug)] pub struct Context(Arc<AndroidContext>); #[derive(Debug)] struct AndroidSyncEventHandler(Arc<AndroidContext>); impl android_glue::SyncEventHandler for AndroidSyncEventHandler { fn handle(&mut self, event: &android_glue::Event) { match *event { // 'on_surface_destroyed' Android event can arrive with some delay // because multithreading communication. Because of // that, swap_buffers can be called before processing // 'on_surface_destroyed' event, with the native window // surface already destroyed. EGL generates a BAD_SURFACE error in // this situation. Set stop to true to prevent // swap_buffer call race conditions. android_glue::Event::TermWindow => { let mut stopped = self.0.stopped.as_ref().unwrap().lock(); *stopped = true; } _ => { return; } }; } } impl Context { #[inline] pub fn new_windowed<T>( wb: WindowBuilder, el: &EventLoopWindowTarget<T>, pf_reqs: &PixelFormatRequirements, gl_attr: &GlAttributes<&Self>, ) -> Result<(winit::window::Window, Self), CreationError> { let win = wb.build(el)?; let gl_attr = gl_attr.clone().map_sharing(|c| &c.0.egl_context); let nwin = unsafe { android_glue::get_native_window() }; if nwin.is_null() { return Err(OsError("Android's native window is null".to_string())); } let native_display = NativeDisplay::Android; let egl_context = EglContext::new( pf_reqs, &gl_attr, native_display, EglSurfaceType::Window, |c, _| Ok(c[0]), ) .and_then(|p| p.finish(nwin as *const _))?; let ctx = Arc::new(AndroidContext { egl_context, stopped: Some(Mutex::new(false)), }); let handler = Box::new(AndroidSyncEventHandler(ctx.clone())); android_glue::add_sync_event_handler(handler); let context = Context(ctx.clone()); el.set_suspend_callback(Some(Box::new(move |suspended| { let mut stopped = ctx.stopped.as_ref().unwrap().lock(); *stopped = suspended; if suspended { // Android has stopped the activity or sent it to background. // Release the EGL surface and stop the animation loop. unsafe { ctx.egl_context.on_surface_destroyed(); } } else { // Android has started the activity or sent it to foreground. // Restore the EGL surface and animation loop. 
unsafe { let nwin = android_glue::get_native_window();<|fim▁hole|> Ok((win, context)) } #[inline] pub fn new_headless<T>( _el: &EventLoopWindowTarget<T>, pf_reqs: &PixelFormatRequirements, gl_attr: &GlAttributes<&Context>, size: dpi::PhysicalSize<u32>, ) -> Result<Self, CreationError> { let gl_attr = gl_attr.clone().map_sharing(|c| &c.0.egl_context); let context = EglContext::new( pf_reqs, &gl_attr, NativeDisplay::Android, EglSurfaceType::PBuffer, |c, _| Ok(c[0]), )?; let egl_context = context.finish_pbuffer(size)?; let ctx = Arc::new(AndroidContext { egl_context, stopped: None, }); Ok(Context(ctx)) } #[inline] pub unsafe fn make_current(&self) -> Result<(), ContextError> { if let Some(ref stopped) = self.0.stopped { let stopped = stopped.lock(); if *stopped { return Err(ContextError::ContextLost); } } self.0.egl_context.make_current() } #[inline] pub unsafe fn make_not_current(&self) -> Result<(), ContextError> { if let Some(ref stopped) = self.0.stopped { let stopped = stopped.lock(); if *stopped { return Err(ContextError::ContextLost); } } self.0.egl_context.make_not_current() } #[inline] pub fn resize(&self, _: u32, _: u32) {} #[inline] pub fn is_current(&self) -> bool { self.0.egl_context.is_current() } #[inline] pub fn get_proc_address(&self, addr: &str) -> *const core::ffi::c_void { self.0.egl_context.get_proc_address(addr) } #[inline] pub fn swap_buffers(&self) -> Result<(), ContextError> { if let Some(ref stopped) = self.0.stopped { let stopped = stopped.lock(); if *stopped { return Err(ContextError::ContextLost); } } self.0.egl_context.swap_buffers() } #[inline] pub fn swap_buffers_with_damage( &self, rects: &[Rect], ) -> Result<(), ContextError> { if let Some(ref stopped) = self.0.stopped { let stopped = stopped.lock(); if *stopped { return Err(ContextError::ContextLost); } } self.0.egl_context.swap_buffers_with_damage(rects) } #[inline] pub fn swap_buffers_with_damage_supported(&self) -> bool { self.0.egl_context.swap_buffers_with_damage_supported() } #[inline] pub fn get_api(&self) -> Api { self.0.egl_context.get_api() } #[inline] pub fn get_pixel_format(&self) -> PixelFormat { self.0.egl_context.get_pixel_format() } #[inline] pub unsafe fn raw_handle(&self) -> ffi::EGLContext { self.0.egl_context.raw_handle() } #[inline] pub unsafe fn get_egl_display(&self) -> ffi::EGLDisplay { self.0.egl_context.get_egl_display() } }<|fim▁end|>
ctx.egl_context.on_surface_created(nwin as *const _); } } })));
<|file_name|>sender_test.go<|end_file_name|><|fim▁begin|>package loki_test import ( "bytes" "encoding/json" "fmt" "testing" "time" "github.com/andviro/goldie" "github.com/andviro/grayproxy/pkg/loki" "github.com/prometheus/common/model" ) type testHandler bytes.Buffer func (t *testHandler) Handle(ls model.LabelSet, ts time.Time, s string) error { buf := (*bytes.Buffer)(t) jd, _ := json.Marshal(ts) fmt.Fprintf(buf, "%s\n", jd) jd, _ = json.MarshalIndent(ls, "", "\t") fmt.Fprintf(buf, "%s\n", jd) fmt.Fprintf(buf, "%s\n", s) return nil } func TestSender_Send(t *testing.T) { buf := new(bytes.Buffer) s := loki.Sender{Handler: (*testHandler)(buf), Job: "test"} err := s.Send([]byte(`{ "version": "1.1", "host": "example.org", "short_message": "A short message that helps you identify what is going on",<|fim▁hole|> "timestamp": 1385053862.3072, "level": 1, "_user_id": 9001, "_some_info": "foo", "_some_env_var": "bar" }`)) if err != nil { t.Fatal(err) } goldie.Assert(t, "sender-send", buf.Bytes()) }<|fim▁end|>
"full_message": "Backtrace here\n\nmore stuff",
<|file_name|>modbus_server.rs<|end_file_name|><|fim▁begin|>use crate::prelude::*; use libmodbus_sys as ffi; /// The server is waiting for request from clients and must answer when it is concerned by the request. The libmodbus /// offers the following functions to handle requests: /// /// * Receive /// - [`receive()`](struct.Modbus.html#method.receive) /// * Reply /// - [`reply()`](struct.Modbus.html#method.reply), [`reply_exception()`](struct.Modbus.html#method.reply_exception) /// pub trait ModbusServer { fn receive(&self, request: &mut [u8]) -> Result<i32, Error>; fn reply( &self, request: &[u8], request_len: i32, modbus_mapping: &ModbusMapping,<|fim▁hole|>} impl ModbusServer for Modbus { /// `receive` - receive an indication request /// /// The [`receive()`](#method.receive) function shall receive an indication request from the socket of the context /// ctx. /// This function is used by Modbus slave/server to receive and analyze indication request sent by the /// masters/clients. /// /// If you need to use another socket or file descriptor than the one defined in the context ctx, see the function /// [`set_socket()`](struct.Modbus.html#method.set_socket). /// /// # Examples /// /// ```rust,no_run /// use libmodbus::{Modbus, ModbusServer, ModbusTCP}; /// let modbus = Modbus::new_tcp("127.0.0.1", 1502).unwrap(); /// let mut query = vec![0; Modbus::MAX_ADU_LENGTH as usize]; /// /// assert!(modbus.receive(&mut query).is_ok()); /// ``` fn receive(&self, request: &mut [u8]) -> Result<i32, Error> { assert!(request.len() <= Modbus::MAX_ADU_LENGTH as usize); unsafe { let len = ffi::modbus_receive(self.ctx, request.as_mut_ptr()); match len { -1 => Err(Error::Server { msg: "receive".to_owned(), source: ::std::io::Error::last_os_error(), }), len => Ok(len), } } } /// `modbus_reply` - send a reponse to the received request /// /// The [`reply()`](#method.reply) function shall send a response to received request. The request req given in /// argument is analyzed, a response is then built and sent by using the information of the modbus context ctx. /// If the request indicates to read or write a value the operation will done in the modbus mapping mb_mapping /// according to the type of the manipulated data. /// If an error occurs, an exception response will be sent. /// /// This function is designed for Modbus server. /// /// # Examples /// /// ```rust,no_run /// use libmodbus::{Modbus, ModbusServer, ModbusTCP}; /// /// let modbus = Modbus::new_tcp("127.0.0.1", 1502).unwrap(); /// let mut query = vec![0; Modbus::MAX_ADU_LENGTH as usize]; /// /// assert!(modbus.receive(&mut query).is_ok()); /// ``` fn reply( &self, request: &[u8], request_len: i32, modbus_mapping: &ModbusMapping, ) -> Result<i32, Error> { unsafe { let len = ffi::modbus_reply( self.ctx, request.as_ptr(), request_len, modbus_mapping.modbus_mapping, ); match len { -1 => Err(Error::Server { msg: "reply".to_owned(), source: ::std::io::Error::last_os_error(), }), len => Ok(len), } } } }<|fim▁end|>
) -> Result<i32, Error>;
<|file_name|>manaboy.py<|end_file_name|><|fim▁begin|>import time import net.mapserv as mapserv import net.charserv as charserv import commands import walkto import logicmanager import status import plugins from collections import deque from net.inventory import get_item_index, get_storage_index from utils import extends from actor import find_nearest_being from chat import send_whisper as whisper __all__ = [ 'PLUGIN', 'init' ] PLUGIN = { 'name': 'manaboy', 'requires': ('chatbot', 'npc', 'autofollow'), 'blocks': (), } npcdialog = { 'start_time': -1, 'program': [], } _times = { 'follow': 0, 'where' : 0, 'status' : 0, 'inventory' : 0, 'say' : 0, 'zeny' : 0, 'storage' : 0, } admins = ['Trav', 'Travolta', 'Komornyik'] allowed_drops = [535, 719, 513, 727, 729, 869] npc_owner = '' history = deque(maxlen=10) storage_is_open = False def set_npc_owner(nick): global npc_owner if plugins.npc.npc_id < 0: npc_owner = nick @extends('smsg_being_remove') def bot_dies(data): if data.id == charserv.server.account: mapserv.cmsg_player_respawn() @extends('smsg_npc_message') @extends('smsg_npc_choice') @extends('smsg_npc_close') @extends('smsg_npc_next') @extends('smsg_npc_int_input') @extends('smsg_npc_str_input') def npc_activity(data): npcdialog['start_time'] = time.time() @extends('smsg_npc_message') def npc_message(data): if not npc_owner: return npc = mapserv.beings_cache.findName(data.id) m = '[npc] {} : {}'.format(npc, data.message) whisper(npc_owner, m) @extends('smsg_npc_choice') def npc_choice(data): if not npc_owner: return choices = filter(lambda s: len(s.strip()) > 0, data.select.split(':')) whisper(npc_owner, '[npc][select] (use !input <number> to select)') for i, s in enumerate(choices): whisper(npc_owner, ' {}) {}'.format(i + 1, s)) @extends('smsg_npc_int_input') @extends('smsg_npc_str_input') def npc_input(data): if not npc_owner: return t = 'number' if plugins.npc.input_type == 'str': t = 'string' whisper(npc_owner, '[npc][input] (use !input <{}>)'.format(t)) @extends('smsg_storage_status') def storage_status(data): print 'storage_status' global storage_is_open storage_is_open = True _times['storage'] = time.time() if npc_owner: whisper(npc_owner, '[storage]') @extends('smsg_storage_items') @extends('smsg_storage_equip') def storage_items(data): if not npc_owner: return ls = status.invlists2(max_length=255, source='storage') for l in ls: whisper(npc_owner, l) @extends('smsg_storage_close') def storage_close(data): print 'smsg_storage_close' global storage_is_open storage_is_open = False _times['storage'] = 0 def cmd_where(nick, message, is_whisper, match): if not is_whisper: return msg = status.player_position() whisper(nick, msg) def cmd_goto(nick, message, is_whisper, match): if not is_whisper: return try: x = int(match.group(1)) y = int(match.group(2)) except ValueError: return set_npc_owner(nick) plugins.autofollow.follow = '' mapserv.cmsg_player_change_dest(x, y) def cmd_goclose(nick, message, is_whisper, match): if not is_whisper: return x = mapserv.player_pos['x'] y = mapserv.player_pos['y'] if message.startswith('!left'): x -= 1 elif message.startswith('!right'): x += 1 elif message.startswith('!up'): y -= 1 elif message.startswith('!down'): y += 1 set_npc_owner(nick) plugins.autofollow.follow = '' mapserv.cmsg_player_change_dest(x, y) def cmd_pickup(nick, message, is_whisper, match): if not is_whisper: return commands.pickup() def cmd_drop(nick, message, is_whisper, match): if not is_whisper: return try: amount = int(match.group(1)) item_id = int(match.group(2)) except ValueError: 
return if nick not in admins: if item_id not in allowed_drops: return index = get_item_index(item_id) if index > 0: mapserv.cmsg_player_inventory_drop(index, amount) def cmd_item_action(nick, message, is_whisper, match): if not is_whisper: return try: itemId = int(match.group(1)) except ValueError: return index = get_item_index(itemId) if index <= 0: return if message.startswith('!equip'): mapserv.cmsg_player_equip(index) elif message.startswith('!unequip'): mapserv.cmsg_player_unequip(index) elif message.startswith('!use'): mapserv.cmsg_player_inventory_use(index, itemId) def cmd_emote(nick, message, is_whisper, match): if not is_whisper: return try: emote = int(match.group(1)) except ValueError: return mapserv.cmsg_player_emote(emote) def cmd_attack(nick, message, is_whisper, match): if not is_whisper: return target_s = match.group(1) try: target = mapserv.beings_cache[int(target_s)] except (ValueError, KeyError): target = find_nearest_being(name=target_s, ignored_ids=walkto.unreachable_ids) if target is not None: set_npc_owner(nick) plugins.autofollow.follow = '' walkto.walkto_and_action(target, 'attack') def cmd_say(nick, message, is_whisper, match): if not is_whisper: return msg = match.group(1) whisper(nick, msg) def cmd_sit(nick, message, is_whisper, match): if not is_whisper: return plugins.autofollow.follow = '' mapserv.cmsg_player_change_act(0, 2) def cmd_turn(nick, message, is_whisper, match): if not is_whisper: return commands.set_direction('', message[6:]) def cmd_follow(nick, message, is_whisper, match): if not is_whisper: return if plugins.autofollow.follow == nick: plugins.autofollow.follow = '' else: set_npc_owner(nick) plugins.autofollow.follow = nick def cmd_lvlup(nick, message, is_whisper, match): if not is_whisper: return stat = match.group(1).lower() stats = {'str': 13, 'agi': 14, 'vit': 15, 'int': 16, 'dex': 17, 'luk': 18} skills = {'mallard': 45, 'brawling': 350, 'speed': 352, 'astral': 354, 'raging': 355, 'resist': 353} if stat in stats: mapserv.cmsg_stat_update_request(stats[stat], 1) elif stat in skills: mapserv.cmsg_skill_levelup_request(skills[stat]) def cmd_invlist(nick, message, is_whisper, match): if not is_whisper: return ls = status.invlists(50) for l in ls: whisper(nick, l) def cmd_inventory(nick, message, is_whisper, match): if not is_whisper: return ls = status.invlists2(255) for l in ls: whisper(nick, l) def cmd_status(nick, message, is_whisper, match): if not is_whisper: return all_stats = ('stats', 'hpmp', 'weight', 'points', 'zeny', 'attack', 'skills') sr = status.stats_repr(*all_stats) whisper(nick, ' | '.join(sr.values())) def cmd_zeny(nick, message, is_whisper, match): if not is_whisper: return whisper(nick, 'I have {} GP'.format(mapserv.player_money)) def cmd_talk2npc(nick, message, is_whisper, match): if not is_whisper: return npc_s = match.group(1) jobs = [] name = '' try: jobs = [int(npc_s)] except ValueError: name = npc_s b = find_nearest_being(name=name, type='npc', allowed_jobs=jobs) if b is None: return set_npc_owner(nick) plugins.autofollow.follow = '' plugins.npc.npc_id = b.id mapserv.cmsg_npc_talk(b.id) def cmd_input(nick, message, is_whisper, match): if not is_whisper: return plugins.npc.cmd_npcinput('', match.group(1)) def cmd_close(nick, message, is_whisper, match): if not is_whisper: return if storage_is_open: reset_storage() else: plugins.npc.cmd_npcclose() def cmd_history(nick, message, is_whisper, match): if not is_whisper: return for user, cmd in history: whisper(nick, '{} : {}'.format(user, cmd)) def cmd_store(nick, message, 
is_whisper, match): if not is_whisper: return if not storage_is_open: return try: amount = int(match.group(1)) item_id = int(match.group(2)) except ValueError: return index = get_item_index(item_id) if index > 0: mapserv.cmsg_move_to_storage(index, amount) def cmd_retrieve(nick, message, is_whisper, match): if not is_whisper: return if not storage_is_open: return try: amount = int(match.group(1)) item_id = int(match.group(2)) except ValueError: return index = get_storage_index(item_id) if index > 0: mapserv.cmsg_move_from_storage(index, amount) def cmd_help(nick, message, is_whisper, match): if not is_whisper: return m = ('[@@https://forums.themanaworld.org/viewtopic.php?f=12&t=19673|Forum@@]' '[@@https://bitbucket.org/rumly111/manachat|Sources@@] ' 'Try !commands for list of commands') whisper(nick, m) def cmd_commands(nick, message, is_whisper, match): if not is_whisper: return c = [] for cmd in manaboy_commands: if cmd.startswith('!('): br = cmd.index(')') c.extend(cmd[2:br].split('|')) elif cmd.startswith('!'): c.append(cmd[1:].split()[0]) c.sort() whisper(nick, ', '.join(c)) def reset_storage(): mapserv.cmsg_storage_close() mapserv.cmsg_npc_list_choice(plugins.npc.npc_id, 6) # ========================================================================= def manaboy_logic(ts): def reset(): global npc_owner npc_owner = '' npcdialog['start_time'] = -1 plugins.npc.cmd_npcinput('', '6') # plugins.npc.cmd_npcclose() if storage_is_open and ts > _times['storage'] + 150: reset_storage() if npcdialog['start_time'] <= 0: return if not storage_is_open and ts > npcdialog['start_time'] + 30.0: reset() # ========================================================================= manaboy_commands = { '!where' : cmd_where, '!goto (\d+) (\d+)' : cmd_goto, '!(left|right|up|down)' : cmd_goclose, '!pickup' : cmd_pickup, '!drop (\d+) (\d+)' : cmd_drop, '!equip (\d+)' : cmd_item_action, '!unequip (\d+)' : cmd_item_action, '!use (\d+)' : cmd_item_action, '!emote (\d+)' : cmd_emote, '!attack (.+)' : cmd_attack, '!say ((@|#).+)' : cmd_say, '!sit' : cmd_sit, '!turn' : cmd_turn, '!follow' : cmd_follow, '!lvlup (\w+)' : cmd_lvlup, '!inventory' : cmd_inventory, '!invlist' : cmd_invlist, '!status' : cmd_status, '!zeny' : cmd_zeny, '!talk2npc (\w+)' : cmd_talk2npc, '!input (.+)' : cmd_input, '!close' : cmd_close, '!store (\d+) (\d+)' : cmd_store, '!retrieve (\d+) (\d+)' : cmd_retrieve, '!(help|info)' : cmd_help, '!commands' : cmd_commands, '!history' : cmd_history, } def chatbot_answer_mod(func): '''modifies chatbot.answer to remember last 10 commands''' def mb_answer(nick, message, is_whisper): if is_whisper: history.append((nick, message)) return func(nick, message, is_whisper) return mb_answer def init(config):<|fim▁hole|> plugins.chatbot.answer = chatbot_answer_mod(plugins.chatbot.answer) logicmanager.logic_manager.add_logic(manaboy_logic)<|fim▁end|>
for cmd, action in manaboy_commands.items(): plugins.chatbot.add_command(cmd, action)
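Because init() above just walks manaboy_commands and calls plugins.chatbot.add_command(pattern, handler), extending the bot is a matter of adding one dict entry whose handler follows the (nick, message, is_whisper, match) signature used throughout the file. A hedged sketch -- the !ping command is invented for illustration:

def cmd_ping(nick, message, is_whisper, match):
    # Same contract as the handlers above: only react to whispers.
    if not is_whisper:
        return
    whisper(nick, 'pong')

# The regex key is matched against incoming chat by the chatbot plugin;
# init() registers it automatically at plugin load.
manaboy_commands['!ping'] = cmd_ping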
<|file_name|>manage_user.py<|end_file_name|><|fim▁begin|>""" Support tool for disabling user accounts. """ from django.contrib.auth import get_user_model from django.db.models import Q from django.urls import reverse from django.utils.decorators import method_decorator from django.utils.translation import ugettext as _ from django.views.generic import View from rest_framework.generics import GenericAPIView from edxmako.shortcuts import render_to_response from lms.djangoapps.support.decorators import require_support_permission from openedx.core.djangoapps.user_api.accounts.serializers import AccountUserSerializer from openedx.core.djangoapps.user_authn.utils import generate_password from util.json_request import JsonResponse class ManageUserSupportView(View): """ View for viewing and managing user accounts, used by the support team. """ @method_decorator(require_support_permission) def get(self, request): """Render the manage user support tool view.""" return render_to_response('support/manage_user.html', { _('username'): request.GET.get('user', ''), _('user_support_url'): reverse('support:manage_user'), _('user_detail_url'): reverse('support:manage_user_detail') }) class ManageUserDetailView(GenericAPIView): """ Allows viewing and disabling learner accounts by support staff. """ # TODO: ARCH-91 # This view is excluded from Swagger doc generation because it # does not specify a serializer class. exclude_from_schema = True @method_decorator(require_support_permission) def get(self, request, username_or_email): """ Returns details for the given user, along with information about its username and joining date. """ try: user = get_user_model().objects.get( Q(username=username_or_email) | Q(email=username_or_email) ) data = AccountUserSerializer(user, context={'request': request}).data data['status'] = _('Usable') if user.has_usable_password() else _('Unusable') return JsonResponse(data) except get_user_model().DoesNotExist: return JsonResponse([]) @method_decorator(require_support_permission) def post(self, request, username_or_email): """Allows support staff to disable a user's account.""" user = get_user_model().objects.get( Q(username=username_or_email) | Q(email=username_or_email) ) if user.has_usable_password(): user.set_unusable_password() else:<|fim▁hole|> user.save() if user.has_usable_password(): password_status = _('Usable') msg = _('User Enabled Successfully') else: password_status = _('Unusable') msg = _('User Disabled Successfully') return JsonResponse({'success_msg': msg, 'status': password_status})<|fim▁end|>
user.set_password(generate_password(length=25))
<|file_name|>test_suite_b.py<|end_file_name|><|fim▁begin|># Suite B tests # Copyright (c) 2014-2015, Jouni Malinen <[email protected]> # # This software may be distributed under the terms of the BSD license. # See README for more details. import time import logging logger = logging.getLogger() import hostapd from utils import HwsimSkip def test_suite_b(dev, apdev): """WPA2-PSK/GCMP connection at Suite B 128-bit level""" if "GCMP" not in dev[0].get_capability("pairwise"): raise HwsimSkip("GCMP not supported") if "BIP-GMAC-128" not in dev[0].get_capability("group_mgmt"): raise HwsimSkip("BIP-GMAC-128 not supported") if "WPA-EAP-SUITE-B" not in dev[0].get_capability("key_mgmt"): raise HwsimSkip("WPA-EAP-SUITE-B not supported") tls = dev[0].request("GET tls_library") if not tls.startswith("OpenSSL"): raise HwsimSkip("TLS library not supported for Suite B: " + tls); if "build=OpenSSL 1.0.2" not in tls or "run=OpenSSL 1.0.2" not in tls: raise HwsimSkip("OpenSSL version not supported for Suite B: " + tls) dev[0].flush_scan_cache() params = { "ssid": "test-suite-b", "wpa": "2", "wpa_key_mgmt": "WPA-EAP-SUITE-B", "rsn_pairwise": "GCMP", "group_mgmt_cipher": "BIP-GMAC-128", "ieee80211w": "2", "ieee8021x": "1", "openssl_ciphers": "SUITEB128", #"dh_file": "auth_serv/dh.conf", "eap_server": "1", "eap_user_file": "auth_serv/eap_user.conf", "ca_cert": "auth_serv/ec-ca.pem", "server_cert": "auth_serv/ec-server.pem", "private_key": "auth_serv/ec-server.key" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B", ieee80211w="2", openssl_ciphers="SUITEB128", eap="TLS", identity="tls user", ca_cert="auth_serv/ec-ca.pem", client_cert="auth_serv/ec-user.pem", private_key="auth_serv/ec-user.key", pairwise="GCMP", group="GCMP", scan_freq="2412") tls_cipher = dev[0].get_status_field("EAP TLS cipher") if tls_cipher != "ECDHE-ECDSA-AES128-GCM-SHA256": raise Exception("Unexpected TLS cipher: " + tls_cipher) bss = dev[0].get_bss(apdev[0]['bssid']) if 'flags' not in bss: raise Exception("Could not get BSS flags from BSS table") if "[WPA2-EAP-SUITE-B-GCMP]" not in bss['flags']: raise Exception("Unexpected BSS flags: " + bss['flags']) dev[0].request("DISCONNECT") dev[0].wait_disconnected(timeout=20) dev[0].dump_monitor() dev[0].request("RECONNECT") ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED", "CTRL-EVENT-CONNECTED"], timeout=20) if ev is None: raise Exception("Roaming with the AP timed out") if "CTRL-EVENT-EAP-STARTED" in ev: raise Exception("Unexpected EAP exchange") def test_suite_b_192(dev, apdev): """WPA2-PSK/GCMP-256 connection at Suite B 192-bit level""" if "GCMP-256" not in dev[0].get_capability("pairwise"): raise HwsimSkip("GCMP-256 not supported") if "BIP-GMAC-256" not in dev[0].get_capability("group_mgmt"): raise HwsimSkip("BIP-GMAC-256 not supported") if "WPA-EAP-SUITE-B-192" not in dev[0].get_capability("key_mgmt"): raise HwsimSkip("WPA-EAP-SUITE-B-192 not supported") tls = dev[0].request("GET tls_library") if not tls.startswith("OpenSSL"): raise HwsimSkip("TLS library not supported for Suite B: " + tls); if "build=OpenSSL 1.0.2" not in tls or "run=OpenSSL 1.0.2" not in tls: raise HwsimSkip("OpenSSL version not supported for Suite B: " + tls) dev[0].flush_scan_cache() params = { "ssid": "test-suite-b", "wpa": "2", "wpa_key_mgmt": "WPA-EAP-SUITE-B-192", "rsn_pairwise": "GCMP-256", "group_mgmt_cipher": "BIP-GMAC-256", "ieee80211w": "2", "ieee8021x": "1", "openssl_ciphers": "SUITEB192", "eap_server": "1", "eap_user_file": "auth_serv/eap_user.conf", 
"ca_cert": "auth_serv/ec2-ca.pem", "server_cert": "auth_serv/ec2-server.pem", "private_key": "auth_serv/ec2-server.key" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) dev[0].connect("test-suite-b", key_mgmt="WPA-EAP-SUITE-B-192", ieee80211w="2", openssl_ciphers="SUITEB192", eap="TLS", identity="tls user", ca_cert="auth_serv/ec2-ca.pem", client_cert="auth_serv/ec2-user.pem", private_key="auth_serv/ec2-user.key", pairwise="GCMP-256", group="GCMP-256", scan_freq="2412") tls_cipher = dev[0].get_status_field("EAP TLS cipher") if tls_cipher != "ECDHE-ECDSA-AES256-GCM-SHA384": raise Exception("Unexpected TLS cipher: " + tls_cipher) bss = dev[0].get_bss(apdev[0]['bssid']) if 'flags' not in bss: raise Exception("Could not get BSS flags from BSS table") if "[WPA2-EAP-SUITE-B-192-GCMP-256]" not in bss['flags']: raise Exception("Unexpected BSS flags: " + bss['flags'])<|fim▁hole|> dev[0].request("DISCONNECT") dev[0].wait_disconnected(timeout=20) dev[0].dump_monitor() dev[0].request("RECONNECT") ev = dev[0].wait_event(["CTRL-EVENT-EAP-STARTED", "CTRL-EVENT-CONNECTED"], timeout=20) if ev is None: raise Exception("Roaming with the AP timed out") if "CTRL-EVENT-EAP-STARTED" in ev: raise Exception("Unexpected EAP exchange")<|fim▁end|>
<|file_name|>newmessage.py<|end_file_name|><|fim▁begin|>import re from .base import EventBuilder from .._misc import utils from .. import _tl from ..types import _custom class NewMessage(EventBuilder, _custom.Message): """ Represents the event of a new message. This event can be treated to all effects as a `Message <telethon.tl.custom.message.Message>`, so please **refer to its documentation** to know what you can do with this event. Members: message (`Message <telethon.tl.custom.message.Message>`): This is the only difference with the received `Message <telethon.tl.custom.message.Message>`, and will return the `telethon.tl.custom.message.Message` itself, not the text. See `Message <telethon.tl.custom.message.Message>` for the rest of available members and methods. pattern_match (`obj`): The resulting object from calling the passed ``pattern`` function. Here's an example using a string (defaults to regex match): >>> from telethon import TelegramClient, events >>> client = TelegramClient(...) >>> >>> @client.on(events.NewMessage(pattern=r'hi (\\w+)!')) ... async def handler(event): ... # In this case, the result is a ``Match`` object ... # since the `str` pattern was converted into ... # the ``re.compile(pattern).match`` function. ... print('Welcomed', event.pattern_match.group(1)) ... >>> Example .. code-block:: python import asyncio from telethon import events @client.on(events.NewMessage(pattern='(?i)hello.+')) async def handler(event): # Respond whenever someone says "Hello" and something else await event.reply('Hey!') @client.on(events.NewMessage(outgoing=True, pattern='!ping')) async def handler(event): # Say "!pong" whenever you send "!ping", then delete both messages m = await event.respond('!pong') await asyncio.sleep(5) await client.delete_messages(event.chat_id, [event.id, m.id]) """ @classmethod def _build(cls, client, update, entities): if isinstance(update, (_tl.UpdateNewMessage, _tl.UpdateNewChannelMessage)): if not isinstance(update.message, _tl.Message): return # We don't care about MessageService's here msg = update.message elif isinstance(update, _tl.UpdateShortMessage): msg = _tl.Message( out=update.out, mentioned=update.mentioned, media_unread=update.media_unread, silent=update.silent, id=update.id, peer_id=_tl.PeerUser(update.user_id), from_id=_tl.PeerUser(self_id if update.out else update.user_id), message=update.message, date=update.date, fwd_from=update.fwd_from, via_bot_id=update.via_bot_id, reply_to=update.reply_to, entities=update.entities, ttl_period=update.ttl_period ) elif isinstance(update, _tl.UpdateShortChatMessage):<|fim▁hole|> mentioned=update.mentioned, media_unread=update.media_unread, silent=update.silent, id=update.id, from_id=_tl.PeerUser(self_id if update.out else update.from_id), peer_id=_tl.PeerChat(update.chat_id), message=update.message, date=update.date, fwd_from=update.fwd_from, via_bot_id=update.via_bot_id, reply_to=update.reply_to, entities=update.entities, ttl_period=update.ttl_period ) else: return return cls._new(client, msg, entities, None)<|fim▁end|>
msg = _tl.Message( out=update.out,
<|file_name|>solution_util.py<|end_file_name|><|fim▁begin|>import numpy as np <|fim▁hole|> numBandits, numArms = estimates.shape explore = np.zeros(numBandits) explore[np.random.random(numBandits) <= epsilon] = 1 arm = np.argmax(estimates, axis=1) arm[explore == 1] = np.random.randint(0, numArms, np.count_nonzero(explore)) return arm def softmax(estimates, temperature): temp_est = estimates.T / temperature exponents = np.exp(temp_est - np.max(temp_est)) dist = exponents / np.sum(exponents, axis=0) return (np.random.random(temp_est.shape) < dist.cumsum(axis=0)).argmax(axis=0) def pref_softmax(preferences): pref = preferences.T exponents = np.exp(pref - np.max(pref)) dist = exponents / np.sum(exponents, axis=0) return (np.random.random(pref.shape) < dist.cumsum(axis=0)).argmax(axis=0)<|fim▁end|>
def e_greedy(estimates, epsilon):
<|file_name|>test-iframe-transport.js<|end_file_name|><|fim▁begin|>/** * Copyright 2017 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { IframeTransport, getIframeTransportScriptUrl, } from '../iframe-transport'; import {addParamsToUrl} from '../../../../src/url'; import {expectPostMessage} from '../../../../testing/iframe.js'; import {urls} from '../../../../src/config'; import {user} from '../../../../src/log'; describes.realWin('amp-analytics.iframe-transport', {amp: true}, env => { let sandbox; let iframeTransport; const frameUrl = 'http://example.com'; beforeEach(() => { sandbox = env.sandbox; iframeTransport = new IframeTransport( env.ampdoc.win, 'some_vendor_type', {iframe: frameUrl}, frameUrl + '-1' ); }); afterEach(() => { IframeTransport.resetCrossDomainIframes(); }); function expectAllUnique(numArray) { if (!numArray) { return; } expect(numArray).to.have.lengthOf(new Set(numArray).size); } it('creates one frame per vendor type', () => { const createCrossDomainIframeSpy = sandbox.spy( iframeTransport, 'createCrossDomainIframe' ); expect(createCrossDomainIframeSpy).to.not.be.called; expect(IframeTransport.hasCrossDomainIframe(iframeTransport.getType())).to .be.true; iframeTransport.processCrossDomainIframe(); expect(createCrossDomainIframeSpy).to.not.be.called; }); it('enqueues event messages correctly', () => { const url = 'https://example.com/test'; const config = {iframe: url}; iframeTransport.sendRequest('hello, world!', config);<|fim▁hole|> expect(queue.queueSize()).to.equal(2); }); it('does not cause sentinel collisions', () => { const iframeTransport2 = new IframeTransport( env.ampdoc.win, 'some_other_vendor_type', {iframe: 'https://example.com/test2'}, 'https://example.com/test2-2' ); const frame1 = IframeTransport.getFrameData(iframeTransport.getType()); const frame2 = IframeTransport.getFrameData(iframeTransport2.getType()); expectAllUnique([ iframeTransport.getCreativeId(), iframeTransport2.getCreativeId(), frame1.frame.sentinel, frame2.frame.sentinel, ]); }); it('correctly tracks usageCount and destroys iframes', () => { const frameUrl2 = 'https://example.com/test2'; const iframeTransport2 = new IframeTransport( env.ampdoc.win, 'some_other_vendor_type', {iframe: frameUrl2}, frameUrl2 + '-3' ); const frame1 = IframeTransport.getFrameData(iframeTransport.getType()); const frame2 = IframeTransport.getFrameData(iframeTransport2.getType()); expect(frame1.usageCount).to.equal(1); expect(frame2.usageCount).to.equal(1); expect(env.win.document.getElementsByTagName('IFRAME')).to.have.lengthOf(2); // Mark the iframes as used multiple times each. iframeTransport.processCrossDomainIframe(); iframeTransport.processCrossDomainIframe(); iframeTransport2.processCrossDomainIframe(); iframeTransport2.processCrossDomainIframe(); iframeTransport2.processCrossDomainIframe(); expect(frame1.usageCount).to.equal(3); expect(frame2.usageCount).to.equal(4); // Stop using the iframes, make sure usage counts go to zero and they are // removed from the DOM. 
IframeTransport.markCrossDomainIframeAsDone( env.win.document, iframeTransport.getType() ); expect(frame1.usageCount).to.equal(2); IframeTransport.markCrossDomainIframeAsDone( env.win.document, iframeTransport.getType() ); IframeTransport.markCrossDomainIframeAsDone( env.win.document, iframeTransport.getType() ); expect(frame1.usageCount).to.equal(0); expect(frame2.usageCount).to.equal(4); // (Still) expect(env.win.document.getElementsByTagName('IFRAME')).to.have.lengthOf(1); IframeTransport.markCrossDomainIframeAsDone( env.win.document, iframeTransport2.getType() ); IframeTransport.markCrossDomainIframeAsDone( env.win.document, iframeTransport2.getType() ); IframeTransport.markCrossDomainIframeAsDone( env.win.document, iframeTransport2.getType() ); IframeTransport.markCrossDomainIframeAsDone( env.win.document, iframeTransport2.getType() ); expect(frame2.usageCount).to.equal(0); expect(env.win.document.getElementsByTagName('IFRAME')).to.have.lengthOf(0); }); it('creates one PerformanceObserver per vendor type', () => { const createPerformanceObserverSpy = sandbox.spy( IframeTransport.prototype, 'createPerformanceObserver_' ); expect(createPerformanceObserverSpy).to.not.be.called; iframeTransport.processCrossDomainIframe(); // Create 2nd frame for 1st vendor expect(createPerformanceObserverSpy).to.not.be.called; // Create frame for a new vendor const frameUrl2 = 'https://example.com/test2'; new IframeTransport( env.ampdoc.win, 'some_other_vendor_type', {iframe: frameUrl2}, frameUrl2 + '-3' ); expect(createPerformanceObserverSpy).to.be.called; }); it('gets correct client lib URL in local/test mode', () => { const url = getIframeTransportScriptUrl(env.ampdoc.win); expect(url).to.contain(env.win.location.host); expect(url).to.contain('/dist/iframe-transport-client-lib.js'); }); it('gets correct client lib URL in prod mode', () => { const url = getIframeTransportScriptUrl(env.ampdoc.win, true); expect(url).to.contain(urls.thirdParty); expect(url).to.contain('/iframe-transport-client-v0.js'); expect(url).to.equal( 'https://3p.ampproject.net/$internalRuntimeVersion$/' + 'iframe-transport-client-v0.js' ); }); }); describes.realWin( 'amp-analytics.iframe-transport', {amp: true, allowExternalResources: true}, env => { it('logs poor performance of vendor iframe', () => { const body = '<html><head><script>' + 'function busyWait(count, duration, cb) {\n' + ' if (count) {\n' + ' var d = new Date();\n' + ' var d2 = null;\n' + ' do {\n' + ' d2 = new Date();\n' + ' } while (d2-d < duration);\n' + // Note the semicolon! 
' setTimeout(function() { ' + ' busyWait(count-1, duration, cb);' + ' },0);\n' + ' } else {\n' + ' cb();\n' + ' }\n' + '}\n' + 'function begin() {\n' + ' busyWait(5, 200, function() {\n' + ' window.parent.postMessage("doneSleeping", "*");\n' + ' });\n' + '}' + '</script></head>' + '<body onload="javascript:begin()">' + 'Non-Performant Fake Iframe' + '</body>' + '</html>'; const frameUrl2 = addParamsToUrl( 'http://ads.localhost:' + document.location.port + '/amp4test/compose-doc', {body} ); sandbox.stub(env.ampdoc.win.document.body, 'appendChild'); new IframeTransport( env.ampdoc.win, 'some_other_vendor_type', {iframe: frameUrl2}, frameUrl2 + '-3' ); sandbox.restore(); const errorSpy = sandbox.spy(user(), 'error'); const {frame} = IframeTransport.getFrameData('some_other_vendor_type'); frame.setAttribute('style', ''); env.ampdoc.win.document.body.appendChild(frame); return new Promise((resolve, unused) => { expectPostMessage( frame.contentWindow, env.ampdoc.win, 'doneSleeping' ).then(() => { expect(errorSpy).to.be.called; expect(errorSpy.args[0][1]).to.match( /Long Task: Vendor: "some_other_vendor_type"/ ); resolve(); }); }); }).timeout(10000); } );<|fim▁end|>
const {queue} = IframeTransport.getFrameData(iframeTransport.getType()); expect(queue.queueSize()).to.equal(1); iframeTransport.sendRequest('hello again, world!', config);
<|file_name|>AbstractView.java<|end_file_name|><|fim▁begin|>package org.turbogwt.mvp; import com.google.gwt.user.client.ui.Composite; /** * Abstract class for every View in the TurboGWT-MVP framework. * <p> * It provides access to its Presenter via the {@link #getPresenter} method. * * @param <P> the Presenter that is attached to this View, whenever it is displayed<|fim▁hole|>public abstract class AbstractView<P extends Presenter> extends Composite implements View<P> { private P presenter; public P getPresenter() { if (presenter == null) { throw new IllegalStateException("Presenter is not set. The Presenter must attach itself to the View via " + "#setPresenter, before the View can use it."); } return presenter; } public void setPresenter(P presenter) { this.presenter = presenter; } }<|fim▁end|>
* * @author Danilo Reinert */
<|file_name|>import-dialog.component.ts<|end_file_name|><|fim▁begin|>/** * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an 'AS IS' BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { Component, Inject, NgZone } from '@angular/core'; import { DataImportService } from './../../services/data-import/data-import.service'; import { FormBuilder, FormControl, FormGroup } from '@angular/forms'; import { MAT_DIALOG_DATA, MatDialogRef } from '@angular/material/dialog'; import { NotificationService } from '../../services/notification/notification.service'; @Component({ selector: 'app-import-dialog', templateUrl: './import-dialog.component.html', styleUrls: ['./import-dialog.component.scss'], }) export class ImportDialogComponent { private projectId: string; private layerId: string; public readonly acceptedExtensions = 'csv,geojson'; uploadForm: FormGroup; public files: Array<File> = []; isImporting = false; constructor( @Inject(MAT_DIALOG_DATA) public data: { projectId: string; layerId: string }, private formBuilder: FormBuilder, private dataImportService: DataImportService, private readonly dialogRef: MatDialogRef<ImportDialogComponent>, private readonly notificationService: NotificationService, private ngZone: NgZone ) { this.projectId = data.projectId; this.layerId = data.layerId; this.uploadForm = this.formBuilder.group({ file: new FormControl(), }); } async onImportFeatures(): Promise<void> { const files = this.uploadForm.get('file')?.value; if (!files || files.length === 0) { console.error('File missing'); return; } try { this.isImporting = true; const response = await this.dataImportService.importFeatures(<|fim▁hole|> files[0] as File ); this.notificationService.success( `Successfully imported ${response.count} features` ); } catch (err) { this.notificationService.error('Importing features failed'); } this.isImporting = false; this.ngZone.run(() => { this.dialogRef.close(); }); } }<|fim▁end|>
this.projectId, this.layerId,
<|file_name|>test_phishing.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """ Copyright 2017 Fedele Mantuano (https://www.linkedin.com/in/fmantuano/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import logging import copy import os import unittest import mailparser from pyfaup.faup import Faup from context import mails from context import utils phishing = mails.phishing base_path = os.path.realpath(os.path.dirname(__file__)) mail_thug = os.path.join(base_path, 'samples', 'mail_thug') mail_form = os.path.join(base_path, 'samples', 'mail_form') mail_test_5 = os.path.join(base_path, 'samples', 'mail_test_5') mail_test_6 = os.path.join(base_path, 'samples', 'mail_test_6') logging.getLogger().addHandler(logging.NullHandler()) class TestPhishing(unittest.TestCase): faup = Faup() def setUp(self): parser = mailparser.parse_from_file(mail_thug) self.email = parser.mail self.attachments = parser.attachments parser = mailparser.parse_from_file(mail_form)<|fim▁hole|> body = self.email_form.get("body") self.urls = utils.urls_extractor(body, self.faup) d = {"generic": "conf/keywords/targets.example.yml", "custom": "conf/keywords/targets_english.example.yml"} self.targets = utils.load_keywords_dict(d) d = {"generic": "conf/keywords/subjects.example.yml", "custom": "conf/keywords/subjects_english.example.yml"} self.subjects = utils.load_keywords_list(d) def test_ParserError(self): parser = mailparser.parse_from_file(mail_test_6) body = parser.mail.get("body") flag_form = phishing.check_form(body) self.assertFalse(flag_form) def test_none_values(self): email = copy.deepcopy(self.email) email.pop("body", None) email.pop("subjects", None) email.pop("from", None) phishing.check_phishing( email=email, attachments=self.attachments, urls_body=self.urls, urls_attachments=self.urls, target_keys=self.targets, subject_keys=self.subjects) def test_check_form(self): body = self.email_form.get("body") flag_form = phishing.check_form(body) self.assertTrue(flag_form) body = self.email.get("body") flag_form = phishing.check_form(body) self.assertFalse(flag_form) def test_form_value_error(self): parser = mailparser.parse_from_file(mail_test_5) body = parser.mail.get("body") flag_form = phishing.check_form(body) self.assertFalse(flag_form) def test_check_urls(self): flag = False if any(phishing.check_urls(self.urls, i) for i in self.targets.values()): flag = True self.assertTrue(flag) def test_check_phishing(self): results = phishing.check_phishing( email=self.email, attachments=self.attachments, urls_body=self.urls, urls_attachments=self.urls, target_keys=self.targets, subject_keys=self.subjects) self.assertIsInstance(results, dict) self.assertEqual(results["score"], 123) self.assertIn("filename_attachments", results["score_expanded"]) self.assertIn("mail_subject", results["score_expanded"]) self.assertIn("mail_body", results["score_expanded"]) self.assertIn("mail_from", results["score_expanded"]) self.assertIn("urls_body", results["score_expanded"]) self.assertIn("urls_attachments", results["score_expanded"]) 
self.assertIn("Test", results["targets"]) self.assertTrue(results["with_phishing"]) def test_check_phishing_form(self): results = phishing.check_phishing( email=self.email_form, attachments=self.attachments, urls_body=self.urls, urls_attachments=self.urls, target_keys=self.targets, subject_keys=self.subjects) self.assertIn("mail_form", results["score_expanded"]) if __name__ == '__main__': unittest.main(verbosity=2)<|fim▁end|>
self.email_form = parser.mail
<|file_name|>absoluteLoad.ts<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2015-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ export default () => [ { id: 1, name: '1' }, { id: 2, name: '2' },<|fim▁hole|> { id: 3, name: '3' }, { id: 4, name: '4' }, ];<|fim▁end|>
<|file_name|>log_manager.py<|end_file_name|><|fim▁begin|># Authors: John Dennis <[email protected]> # # Copyright (C) 2011 Red Hat # see file 'COPYING' for use and warranty information # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. ''' Quick Start Guide For Using This Module ======================================= This module implements a Log Manager class which wraps the Python logging module and provides some utility functions for use with logging. All logging operations should be done through the `LogManager` where available. *DO NOT create objects using the Python logging module, the log manager will be unaware of them.* This module was designed for ease of use while preserving advanced functionality and performance. You must perform the following steps. 1. Import the log_manger module and instantiate *one* `LogManager` instance for your application or library. The `LogManager` is configured via `LogManager.configure()` whose values are easily populated from command line options or a config file. You can modify the configuration again at any point. 2. Create one or more output handlers via `LogManager.create_log_handlers()` an easy to use yet powerful interface. 3. In your code create loggers via `LogManager.get_logger()`. Since loggers are normally bound to a class this method is optimized for that case, all you need to do in the call ``__init__()`` is:: log_mgr.get_logger(self, True) Then emitting messages is as simple as ``self.debug()`` or ``self.error()`` Example: -------- :: # Step 1, Create log manager and configure it prog_name = 'my_app' log_mgr = LogManager(prog_name) log_mgr.configure(dict(verbose=True)) # Step 2, Create handlers log_mgr.create_log_handlers([dict(name='my_app stdout', stream=sys.stdout, level=logging.INFO), dict(name='my_app file', filename='my_app.log', level=logging.DEBUG)]) # Step 3, Create and use a logger in your code class FooBar: def __init__(self, name): log_mgr.get_logger(self, True) self.info("I'm alive! %s", name) foobar = FooBar('Dr. Frankenstein') # Dump the log manager state for illustration print print log_mgr Running the above code would produce:: <INFO>: I'm alive! Dr. Frankenstein root_logger_name: my_app configure_state: None default_level: INFO debug: False verbose: True number of loggers: 2 "my_app" [level=INFO] "my_app.__main__.FooBar" [level=INFO] number of handlers: 2 "my_app file" [level=DEBUG] "my_app stdout" [level=INFO] number of logger regexps: 0 *Note, Steps 1 & 2 were broken out for expository purposes.* You can pass your handler configuration into `LogManager.configure()`. 
The above could have been simpler and more compact.::

     # Step 1 & 2, Create log manager, and configure it and handlers
     prog_name = 'my_app'
     log_mgr = LogManager(prog_name)
     log_mgr.configure(dict(verbose=True,
                            handlers = [dict(name='my_app stdout',
                                             stream=sys.stdout,
                                             level=logging.INFO),
                                        dict(name='my_app file',
                                             filename='my_app.log',
                                             level=logging.DEBUG)]))

FAQ (Frequently Asked Questions)
================================

#. **Why is this better than logging.basicConfig? The short example
   for the LogManager doesn't seem much different in complexity from
   basicConfig?**

   * You get independent logging namespaces. You can instantiate
     multiple logging namespaces. If you use this module you'll be
     isolated from other users of the Python logging module, avoiding
     conflicts.

   * Creating and initializing loggers for classes is trivial. One
     simple call creates the logger, configures it, and sets logging
     methods on the class instance.

   * You can easily configure individual loggers to different levels.
     For example, turn on debugging for just the part of the code
     you're working on.

   * The configuration is both simple and powerful. You get many more
     options than with basicConfig.

   * You can dynamically reset the logging configuration during
     execution; you're not forced to live with the config established
     during program initialization.

   * The manager optimizes the use of the logging objects; you'll
     spend less time executing pointless logging code for messages
     that won't be emitted.

   * You can see the state of all the logging objects in your
     namespace from one centrally managed location.

   * You can configure a LogManager to use the standard logging root
     logger and get all the benefits of this API.

#. **How do I turn on debug logging for a specific class without
   affecting the rest of the logging configuration?**

   Use a logger regular expression to bind a custom level to loggers
   whose name matches the regexp. See `LogManager.configure()`
   for details.

   Let's say you want to set your Foo.Bar class to debug; then do
   this::

     log_mgr.configure(dict(logger_regexps=[(r'Foo\.Bar', 'debug')]))

#. **I set the default_level but all my loggers are configured
   with a higher level, what happened?**

   You probably don't have any handlers defined at or below the
   default_level. The level set on a logger will never be
   lower than the lowest level handler available to that logger.

#. **My loggers all have their level set to a huge integer, why?**

   See above. Loggers will never have a level less than the level of
   the handlers visible to the logger. If there are no handlers then
   loggers can't output anything, so their level is set to maxsize.

#. **I set the default_level but all the loggers are configured
   at INFO or DEBUG, what happened?**

   The verbose and debug config flags set the default_level to INFO
   and DEBUG respectively as a convenience.

#. **I'm not seeing messages output when I expect them to be,
   what's wrong?**

   For a message to be emitted the following 3 conditions must hold:

   * Message level >= logger's level
   * Message level >= handler's level
   * The message was not elided by a filter

   To verify the above conditions hold, print out the log manager
   state (e.g. print log_mgr). Locate your logger, what level is it
   at? Locate the handler you expected to see the message appear on,
   what level is it?

A General Discussion of Python Logging
======================================

The design of this module is driven by how the Python logging module
works. The following discussion complements the Python Logging Howto,
fills in some missing information and covers strategies for
implementing different functionality along with the trade-offs
involved.

Understanding when & how log messages are emitted:
--------------------------------------------------

Loggers provide the application interface for logging. Every logger
object has the following methods: debug(), info(), warning(),
error(), critical(), exception() and log(), all of which can accept
a format string and arguments. Applications generate logging messages
by calling one of these methods to produce a formatted message.

A logger's effective level is the first explicitly set level found
when searching from the logger through its ancestors, terminating at
the root logger. The root logger always has an explicit level
(defaults to WARNING).

For a message to be emitted by a handler the following must be true:

The logger's effective level must be >= message level and the message
must not be filtered by a filter attached to the logger, otherwise
the message is discarded.

If the message survives the logger check it is passed to a list of
handlers. A handler will emit the message if the handler's level >=
message level and it's not filtered by a filter attached to the
handler.

The list of handlers is determined thusly: Each logger has a list of
handlers (which may be empty). Starting with the logger the message
was bound to, the message is passed to each of its handlers. Then
the process repeats itself by traversing the chain of loggers
through all of its ancestors until it reaches the root logger. The
logger traversal will be terminated if the propagate flag on a logger
is False (by default propagate is True).

Let's look at a hypothetical logger hierarchy (tree)::

                                      A
                                     / \\
                                    B   D
                                   /
                                  C


There are 4 loggers and 3 handlers

Loggers:

+-------+---------+---------+-----------+----------+
|Logger | Level   | Filters | Propagate | Handlers |
+=======+=========+=========+===========+==========+
| A     | WARNING | []      | False     | [h1,h2]  |
+-------+---------+---------+-----------+----------+
| A.B   | ERROR   | []      | False     | [h3]     |
+-------+---------+---------+-----------+----------+
| A.B.C |         | []      | True      |          |
+-------+---------+---------+-----------+----------+
| A.D   |         | []      | True      |          |
+-------+---------+---------+-----------+----------+

Handlers:

+---------+---------+---------+
| Handler | Level   | Filters |
+=========+=========+=========+
| h1      | ERROR   | []      |
+---------+---------+---------+
| h2      | WARNING | []      |
+---------+---------+---------+
| h3      | DEBUG   | []      |
+---------+---------+---------+

Each of the loggers and handlers have empty filter lists in this
example, thus the filter checks will always pass.

If a debug message is posted on logger A.B.C the following would
happen. The effective level is determined. Since A.B.C does not have
a level set, its parent (A.B) is examined, which has ERROR set;
therefore the effective level of A.B.C is ERROR. Processing
immediately stops because the logger's level of ERROR does not
permit debug messages.

If an error message is posted on logger A.B.C it passes the logger
level check and filter check, therefore the message is passed along
to the handlers. The list of handlers on A.B.C is empty so no
handlers are called at this position in the logging hierarchy.
Logger A.B.C's propagate flag is True so parent logger A.B's
handlers are invoked. Handler h3's level is DEBUG, it passes both
the level and filter check, thus h3 emits the message. Processing
now stops because logger A.B's propagate flag is False.
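The hierarchy above is easy to reproduce for experimentation. A rough
sketch using only the standard logging module (the handler targets
are arbitrary choices made for the example)::

    import logging
    import sys

    h1 = logging.StreamHandler(sys.stderr); h1.setLevel(logging.ERROR)
    h2 = logging.StreamHandler(sys.stdout); h2.setLevel(logging.WARNING)
    h3 = logging.StreamHandler(sys.stdout); h3.setLevel(logging.DEBUG)

    a = logging.getLogger('A')
    a.setLevel(logging.WARNING)
    a.propagate = False
    a.addHandler(h1)
    a.addHandler(h2)

    ab = logging.getLogger('A.B')
    ab.setLevel(logging.ERROR)
    ab.propagate = False
    ab.addHandler(h3)

    abc = logging.getLogger('A.B.C')   # no level: inherits ERROR from A.B
    ad = logging.getLogger('A.D')      # no level, no handlers

    abc.debug('dropped: effective level is ERROR')
    abc.error('emitted by h3 only')
    ad.warning('emitted by h2 only')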
Now let's see what would happen if a warning message was posted on
logger A.D. Its effective level is WARNING because logger A.D does
not have a level set; its only ancestor is logger A, the root
logger, which has a level of WARNING, thus logger A.D's effective
level is WARNING. Logger A.D has no handlers; its propagate flag
is True so the message is passed to its parent logger A, the root
logger. Logger A has two handlers, h1 and h2. The level of h1 is
ERROR so the warning message is discarded by h1, nothing is emitted
by h1. Next handler h2 is invoked, its level is WARNING so it
passes both the level check and the filter check, thus h2 emits the
warning message.

How to configure independent logging spaces:
--------------------------------------------

A common idiom is to hang all handlers off the root logger and set
the root logger's level to the desired verbosity. But this simplistic
approach runs afoul of several problems, in particular who controls
logging (accomplished by configuring the root logger). The usual
advice is to check and see if the root logger has any handlers set;
if so, someone before you has configured logging and you should
inherit their configuration, all you do is add your own loggers
without any explicitly set level. If the root logger doesn't have
handlers set then you go ahead and configure the root logger to your
preference. The idea here is if your code is being loaded by another
application you want to defer to that application's logging
configuration, but if your code is running stand-alone you need to
set up logging yourself.

But sometimes your code really wants its own logging configuration
managed only by yourself, completely independent of any logging
configuration by someone who may have loaded your code. Even if your
code is not designed to be loaded as a package or module you may be
faced with this problem. A trivial example of this is running your
code under a unit test framework which itself uses the logging
facility (remember there is only ever one root logger in any Python
process).

Fortunately there is a simple way to accommodate this. All you need
to do is create a "fake" root in the logging hierarchy which belongs
to you. You set your fake root's propagate flag to False, set a
level on it, and hang your handlers off this fake root. Then
when you create your loggers each should be a descendant of this
fake root. Now you've completely isolated yourself in the logging
hierarchy and won't be influenced by any other logging configuration.

As an example, let's say your code is called 'foo' and so you
name your fake root logger 'foo'.::

    my_root = logging.getLogger('foo') # child of the root logger
    my_root.propagate = False
    my_root.setLevel(logging.DEBUG)
    my_root.addHandler(my_handler)

Then every logger you create should have 'foo.' prepended to its
name. If you're logging by module your module's logger would be
created like this::

    module_logger = logging.getLogger('foo.%s' % __module__)

If you're logging by class then your class logger would be::

    class_logger = logging.getLogger('foo.%s.%s' % (self.__module__, self.__class__.__name__))

How to set levels:
------------------

An instinctive or simplistic assumption is to set the root logger to
a high logging level, for example ERROR. After all you don't want to
be spamming users with debug and info messages. Let's also assume
you've got two handlers, one for a file and one for the console,
both attached to the root logger (a common configuration) and you
haven't set the level on either handler (in which case the handlers
will emit all levels).

But now let's say you want to turn on debugging, but just to the
file; the console should continue to emit only error messages.

You set the root logger's level to DEBUG. The first thing you notice
is that you're getting debug messages both in the file and on the
console because the console's handler does not have a level
set. Not what you want.

So you go back, restore the root logger's level to its original
ERROR level, and set the file handler's level to DEBUG and the
console handler's level to ERROR. Now you don't get any debug
messages because the root logger is blocking all messages below the
level of ERROR and doesn't invoke any handlers. The file handler
attached to the root logger, even though its level is set to DEBUG,
never gets a chance to process the message.

*IMPORTANT:* You have to set the logger's level to the minimum of
all the attached handlers' levels, otherwise the logger may block
the message from ever reaching any handler. In this example the root
logger's level must be set to DEBUG, the file handler's level to
DEBUG, and the console handler's level set to ERROR.

Now let's take a more real world example which is a bit more
complicated. It's typical to assign loggers to every major class. In
fact this is the design strategy of Java logging, from which the
Python logging is modeled. In a large complex application or library
that means dozens or possibly hundreds of loggers. Now let's say you
need to trace what is happening in one class. If you use the
simplistic configuration outlined above you'll set the log level of
the root logger and one of the handlers to debug. Now you're flooded
with debug messages from every logger in the system when all you
wanted was the debug messages from just one class.

How can you get fine grained control over which loggers emit debug
messages? Here are some possibilities:

(1) Set a filter.
.................

When a message is propagated to a logger in the hierarchy, first the
logger's level is checked. If the logger level check passes then the
logger iterates over every handler attached to the logger, first
checking the handler level. If the handler level check passes then
the filters attached to the handler are run.

Filters are passed the record (i.e. the message); a filter does not
have access to either the logger or handler it's executing within.
You can't just set the filter to pass only the records of the
classes you want to debug because that would block other important
info, warning, error and critical messages from other classes. The
filter would have to know about the "global" log level which is in
effect and also pass any messages at that level or higher. It's
unfortunate the filter cannot know the level of the logger or
handler it's executing inside of.

Also, logger filters are only applied to the logger they are
attached to, i.e. the logger the message was generated on. They do
not get applied to any ancestor loggers. That means you can't just
set a filter on the root logger. You have to either set the filters
on the handlers or on every logger created.

The filter first checks the level of the message record. If it's
greater than debug it passes it. For debug messages it checks the
set of loggers which have debug messages enabled; if the message
record was generated on one of those loggers it passes the record,
otherwise it blocks it.

The only question is whether you attach the filter to every logger
or to a handful of handlers. The advantage of attaching the filter
to every logger is efficiency: the time spent handling the message
can be short circuited much sooner if the message is filtered
earlier in the process. The advantage of attaching the filter to a
handler is simplicity: you only have to do that when a handler is
created, not every place in the code where a logger is created.

(2) Conditionally set the level of each logger.
...............................................

When loggers are created a check is performed to see if the logger
is in the set of loggers for which debug information is desired; if
so its level is set to DEBUG, otherwise it's set to the global
level. One has to recall there really isn't a single global level if
you want some handlers to emit info and above, some handlers error
and above, etc. In this case if the logger is not in the set of
loggers emitting debug, the logger level should be set to the next
increment above debug level.

A good question to ask would be: why not just leave the logger's
level unset if it's not in the set of loggers to be debugged? After
all it will just inherit the root level, right? There are two
problems with that. 1) It would actually inherit the level of any
ancestor logger, and if an ancestor was set to debug you've
effectively turned on debugging for all children of that ancestor
logger. There are times you might want that behavior, where all
your children inherit your level, but there are many cases where
that's not the behavior you want. 2) A more pernicious problem
exists. The logger your handlers are attached to MUST be set to
debug level, otherwise your debug messages will never reach the
handlers for output. Thus if you leave a logger's level unset and
let it inherit its effective level from an ancestor it might very
well inherit the debug level from the root logger. That means you've
completely negated your attempt to selectively set debug logging on
specific loggers. Bottom line, you really have to set the level on
every logger created if you want fine grained control.

Approach 2 has some distinct performance advantages. First of all,
filters are not used; this avoids a whole processing step and extra
filter function calls on every message. Secondly, a logger level
check is a simple integer compare which is very efficient. Thirdly,
the processing of a message can be short circuited very early in the
processing pipeline; no ancestor loggers will be invoked and no
handlers will be invoked. The downside is some added complexity at
logger creation time. But this is easily mitigated by using a
utility function or method to create the logger instead of just
calling logging.getLogger().

Like everything else in computer science, which approach you take
boils down to a series of trade-offs, most around how your code is
organized. You might find it easier to set a filter on just one or
two handlers. It might be easier to modify the configuration during
execution if the logic is centralized in just a filter function, but
don't let that sway you too much because it's trivial to iterate
over every logger and dynamically reset its log level. Now at least
you've got a basic understanding of how this stuff hangs together
and what your options are.
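To make approach (1) concrete before moving on, a filter along these
lines could be attached to each handler; the ``debug_loggers`` set
and the global level argument are assumptions made for the sketch::

    import logging

    class SelectiveDebugFilter(logging.Filter):
        def __init__(self, debug_loggers, global_level=logging.INFO):
            logging.Filter.__init__(self)
            self.debug_loggers = set(debug_loggers)
            self.global_level = global_level

        def filter(self, record):
            # Non-debug records obey the global level.
            if record.levelno > logging.DEBUG:
                return record.levelno >= self.global_level
            # Debug records pass only for explicitly enabled loggers.
            return record.name in self.debug_loggers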
That's not insignificant; when I was first introduced to logging in
Java and Python I found it bewilderingly difficult to get it to do
what I wanted.

John Dennis <[email protected]>

'''
from __future__ import print_function

#-------------------------------------------------------------------------------

import sys
import os
import pwd
import logging
import re
import time

import six

#-------------------------------------------------------------------------------
# Default format
LOGGING_DEFAULT_FORMAT = '%(levelname)s %(message)s'

# Maps a logging level name to its numeric value
log_level_name_map = {
    'notset'   : logging.NOTSET,
    'debug'    : logging.DEBUG,
    'info'     : logging.INFO,
    'warn'     : logging.WARNING,
    'warning'  : logging.WARNING,
    'error'    : logging.ERROR,
    'critical' : logging.CRITICAL
}

log_levels = (logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
              logging.CRITICAL)

logger_method_names = ('debug', 'info', 'warning', 'error', 'exception',
                       'critical')

#-------------------------------------------------------------------------------

def get_unique_levels(iterable):
    '''
    Given an iterable of objects containing a logging level return an
    ordered list (min to max) of unique levels.

    :parameters:
      iterable
        Iterable yielding objects with a logging level attribute.
    :returns:
      Ordered list (min to max) of unique levels.
    '''
    levels = set()
    for obj in iterable:
        level = getattr(obj, 'level', sys.maxsize)
        if level != logging.NOTSET:
            levels.add(level)
    levels = list(levels)
    levels.sort()
    return levels

def get_minimum_level(iterable):
    '''
    Given an iterable of objects containing a logging level return
    the minimum level. If no levels are defined return maxsize.

    :parameters:
      iterable
        Iterable yielding objects with a logging level attribute.
    :returns:
      The minimum level found, or ``sys.maxsize`` if no levels are
      defined.
    '''
    min_level = sys.maxsize
    for obj in iterable:
        level = getattr(obj, 'level', sys.maxsize)
        if level != logging.NOTSET:
            if level < min_level:
                min_level = level
    return min_level

def parse_log_level(level):
    '''
    Given a log level either as a string or integer return a numeric
    logging level. The following case insensitive names are recognized::

        * notset
        * debug
        * info
        * warn
        * warning
        * error
        * critical

    A string containing an integer is also recognized, for example
    ``"10"`` would map to ``logging.DEBUG``

    The integer value must be in the range [``logging.NOTSET``,
    ``logging.CRITICAL``] otherwise a ValueError will be raised.

    :parameters:
      level
        basestring or integer, level value to convert
    :returns:
      integer level value
    '''
    # Is it a string representation of an integer?
    # If so convert to an int.
    if isinstance(level, six.string_types):
        try:
            level = int(level)
        except ValueError:
            pass

    # If it's a string look up its name and map it to a logging level,
    # otherwise validate the integer value is in range.
    if isinstance(level, six.string_types):
        result = log_level_name_map.get(level.lower()) #pylint: disable=E1103
        if result is None:
            raise ValueError('unknown log level (%s)' % level)
        return result
    elif isinstance(level, int):
        if level < logging.NOTSET or level > logging.CRITICAL:
            raise ValueError('log level (%d) out of range' % level)
        return level
    else:
        raise TypeError('log level must be basestring or int, got (%s)' % type(level))

#-------------------------------------------------------------------------------

def logging_obj_str(obj):
    '''
    Unfortunately the logging Logger and Handler classes do not have a
    custom __str__() function which converts the object into a human
    readable string representation. This function takes any object
    with a level attribute and outputs the object's name with its
    associated level. If a name was never set for the object then its
    repr is used instead.

    :parameters:
      obj
        Object with a logging level attribute
    :returns:
      string describing the object
    '''
    name = getattr(obj, 'name', repr(obj))
    text = '"%s" [level=%s]' % (name, logging.getLevelName(obj.level))
    if isinstance(obj, logging.FileHandler):
        text += ' filename="%s"' % obj.baseFilename
    return text

#-------------------------------------------------------------------------------
class LogManager(object):
    '''
    This class wraps the functionality in the logging module to
    provide an easier to use API for logging while providing advanced
    features including an independent namespace. Each application or
    library wishing to have its own logging namespace should instantiate
    exactly one instance of this class and use it to manage all its
    logging.

    Traditionally (or simplistically) logging was set up with a single
    global root logger with output handlers bound to it. The global
    root logger (whose name is the empty string) was shared by all
    code in a loaded process. Only the global unnamed root logger had
    a level set on it; all other loggers created inherited this global
    level. This can cause conflicts in more complex scenarios where
    loaded code wants to maintain its own logging configuration
    independent of whoever loaded its code. By using only a single
    logger level set on the global root logger it was not possible to
    have fine grained control over individual logger output. The
    pattern seen with this simplistic setup has been frequently copied
    despite being clumsy and awkward. The logging module has the tools
    available to support a more sophisticated and useful model, but it
    requires an overarching framework to manage. This class provides
    such a framework.

    The features of this logging manager are:

    * Independent logging namespace.

    * Simplified method to create handlers.

    * Simple setup for applications with command line args.

    * Sophisticated handler configuration
      (e.g. file ownership & permissions)

    * Easy fine grained control of logger output
      (e.g. turning on debug for just 1 or 2 loggers)

    * Holistic management of the interrelationships between logging
      components.

    * Ability to dynamically adjust logging configuration in a
      running process.

    An independent namespace is established by creating an independent
    root logger for this manager (root_logger_name). This root logger
    is a direct child of the global unnamed root logger. All loggers
    created by this manager will be descendants of this manager's root
    logger. The manager's root logger has its propagate flag set to
    False which means all loggers and handlers created by this manager
    will be isolated in the global logging tree.

    Log level management:
    ---------------------

    Traditionally loggers inherited their logging level from the root
    logger. This was simple but made it impossible to independently
    control logging output from different loggers. If you set the root
    level to DEBUG you got DEBUG output from every logger in the
    system, often overwhelming in its voluminous output. Many times
    you want to turn on debug for just one class (a common idiom is to
    have one logger per class). To achieve the fine grained control
    you can either use filters or set a logging level on every logger
    (see the module documentation for the pros and cons). This manager
    sets a log level on every logger instead of using level
    inheritance because it's more efficient at run time.
<|fim▁hole|>
    setting every logger level to INFO and DEBUG respectively.
    Fine grained level control is provided via regular expression
    matching on logger names (see `configure()` for the details).
    For example if you want to set a debug level for the foo.bar logger
    set a regular expression to match it and bind it to the debug level.
    Note, the global verbose and debug flags always override the
    regular expression level configuration. Do not set these global
    flags if you want fine grained control.

    The manager maintains the minimum level for all loggers under its
    control and the minimum level for all handlers under its
    control. The reason it does this is because there is no point in
    generating debug messages on a logger if there is no handler
    defined which will output a debug message. Thus when the level is
    set on a logger it takes into consideration the set of handlers
    that logger can emit to.

    IMPORTANT: Because the manager maintains knowledge about all the
    loggers and handlers under its control it is essential you use
    only the manager's interface to modify a logger or handler and not
    set levels on the objects directly, otherwise the manager will not
    know to visit every object under its control when a configuration
    changes (see `LogManager.apply_configuration()`).

    Example Usage::

        # Create a log manager for use by 'my_app'
        log_mgr = LogManager('my_app')

        # Create a handler to send error messages to stderr
        log_mgr.create_log_handlers([dict(stream=sys.stderr,
                                          level=logging.ERROR)])

        # Create logger for a class
        class Foo(object):
            def __init__(self):
                self.log = log_mgr.get_logger(self)
    '''
    def __init__(self, root_logger_name='', configure_state=None):
        '''
        Create a new LogManager instance using root_logger_name as the
        parent of all loggers maintained by the manager.

        Only one log manager should be created for each logging namespace.

        :parameters:
          root_logger_name
            The name of the root logger. All loggers will be prefixed
            by this name.

          configure_state
            Used by clients of the log manager to track the
            configuration state, may be any object.

        :return:
          LogManager instance

        '''
        self.loggers = {}  # dict, key is logger name, value is logger object
        self.handlers = {} # dict, key is handler name, value is handler object

        self.configure_state = configure_state
        self.root_logger_name = root_logger_name
        self.default_level = 'error'
        self.debug = False
        self.verbose = False
        self.logger_regexps = []

        self.root_logger = self.get_logger(self.root_logger_name)
        # Stop loggers and handlers from searching above our root
        self.root_logger.propagate = False

    def _get_default_level(self):
        return self._default_level

    def _set_default_level(self, value):
        level = parse_log_level(value)
        self._default_level = level
        self.apply_configuration()

    default_level = property(_get_default_level, _set_default_level,
                             doc='See `log_manager.parse_log_level()` for details on how the level can be specified during assignment.')

    def set_default_level(self, level, configure_state=None):
        '''
        Reset the default logger level, updates all loggers.
        Note, the default_level may also be set by assigning to the
        default_level attribute but that does not update the configure_state,
        this method is provided as a convenience to simultaneously set the
        configure_state if so desired.

        :parameters:
          level
            The new default level for the log manager.  See
            `log_manager.parse_log_level()` for details on how the
            level can be specified.

          configure_state
            If other than None update the log manager's configure_state
            variable to this object.
Clients of the log manager can use configure_state to track the state of the log manager. ''' level = parse_log_level(level) self._default_level = level self.apply_configuration(configure_state) def __str__(self): ''' When str() is called on the LogManager output it's state. ''' text = '' text += 'root_logger_name: %s\n' % (self.root_logger_name) text += 'configure_state: %s\n' % (self.configure_state) text += 'default_level: %s\n' % (logging.getLevelName(self.default_level)) text += 'debug: %s\n' % (self.debug) text += 'verbose: %s\n' % (self.verbose) text += 'number of loggers: %d\n' % (len(self.loggers)) loggers = [logging_obj_str(x) for x in self.loggers.values()] loggers.sort() for logger in loggers: text += ' %s\n' % (logger) text += 'number of handlers: %d\n' % (len(self.handlers)) handlers = [logging_obj_str(x) for x in self.handlers.values()] handlers.sort() for handler in handlers: text += ' %s\n' % (handler) text += 'number of logger regexps: %d\n' % (len(self.logger_regexps)) for regexp, level in self.logger_regexps: text += ' "%s" => %s\n' % (regexp, logging.getLevelName(level)) return text def configure(self, config, configure_state=None): ''' The log manager is initialized from key,value pairs in the config dict. This may be called any time to modify the logging configuration at run time. The supported entries in the config dict are: default_level The default level applied to a logger when not indivdually configured. The verbose and debug config items override the default level. See `log_manager.parse_log_level()` for details on how the level can be specified. verbose Boolean, if True sets default_level to INFO. debug Boolean, if True sets default_level to DEBUG. logger_regexps List of (regexp, level) tuples. This is a an ordered list regular expressions used to match against a logger name to configure the logger's level. The first regexp in the sequence which matches the logger name will use the the level bound to that regexp to set the logger's level. If no regexp matches the logger name then the logger will be assigned the default_level. The regular expression comparision is performed with the re.search() function which means the match can be located anywhere in the name string (as opposed to the start of the string). Do not forget to escape regular expression metacharacters when appropriate. For example dot ('.') is used to seperate loggers in a logging hierarchy path (e.g. a.b.c) Examples:: # To match exactly the logger a.b.c and set it to DEBUG: logger_regexps = [(r'^a\.b\.c$', 'debug')] # To match any child of a.b and set it to INFO: logger_regexps = [(r'^a\.b\..*', 'info')] # To match any leaf logger with the name c and set it to level 5: logger_regexps = [(r'\.c$', 5)] handlers List of handler config dicts or (config, logger) tuples. See `create_log_handlers()` for details of a hanlder config. The simple form where handlers is a list of dicts each handler is bound to the log mangers root logger (see `create_log_handlers()` optional ``logger`` parameter). If you want to bind each handler to a specific logger other then root handler then group the handler config with a logger in a (config, logger) tuple. The logger may be either a logger name or a logger instance. The following are all valid methods of passing handler configuration.:: # List of 2 config dicts; both handlers bound to root logger [{}, {}] # List of 2 tuples; first handler bound to logger_name1 # by name, second bound to logger2 by object. 
            [({}, 'logger_name1'), ({}, logger2)]

            # List of 1 dict, 1 tuple; first bound to root logger,
            # second bound to logger_name by name

            [{}, ({}, 'logger_name')]

        :parameters:
          config
            Dict of <key,value> pairs describing the configuration.
          configure_state
            If other than None update the log manager's configure_state variable
            to this object. Clients of the log manager can use configure_state
            to track the state of the log manager.

        '''
        for attr in ('debug', 'verbose', 'logger_regexps'):
            value = config.get(attr)
            if value is not None:
                setattr(self, attr, value)

        attr = 'default_level'
        value = config.get(attr)
        if value is not None:
            try:
                level = parse_log_level(value)
            except Exception as e:
                raise ValueError("could not set %s (%s)" % (attr, e))
            setattr(self, attr, level)

        attr = 'handlers'
        handlers = config.get(attr)
        if handlers is not None:
            for item in handlers:
                logger = self.root_logger
                config = None
                if isinstance(item, dict):
                    config = item
                elif isinstance(item, tuple):
                    if len(item) != 2:
                        raise ValueError('handler tuple must have exactly 2 items, got "%s"' % (item,))
                    config = item[0]
                    logger = item[1]
                else:
                    raise TypeError('expected dict or tuple for handler item, got "%s", handlers=%s' %
                                    (type(item), value))
                if not isinstance(config, dict):
                    raise TypeError('expected dict for handler config, got "%s"' % type(config))
                if isinstance(logger, six.string_types):
                    logger = self.get_logger(logger)
                else:
                    if not isinstance(logger, logging.Logger):
                        raise TypeError('expected logger name or logger object in %s' % (item,))

                self.create_log_handlers([config], logger, configure_state)

        if self.verbose:
            self.default_level = logging.INFO
        if self.debug:
            self.default_level = logging.DEBUG

        self.apply_configuration(configure_state)

    def create_log_handlers(self, configs, logger=None, configure_state=None):
        '''
        Create new handlers and attach them to a logger (the log manager's
        root logger by default).

        *Note, you may also pass the handler configs to `LogManager.configure()`.*

        configs is an iterable yielding a dict.  Each dict configures a
        handler.  Currently two types of handlers are supported:

        * stream
        * file

        Which type of handler is created is determined by the presence of the
        ``stream`` or ``filename`` key in the dict.

        Configuration keys:
        ===================

        Handler type keys:
        ------------------

        Exactly one of the following must be present in the config dict:

        stream
            Use the specified stream to initialize the StreamHandler.

        filename
            Specifies that a FileHandler be created, using the specified
            filename.

        log_handler
            Specifies a custom logging.Handler to use.

        Common keys:
        ------------

        name
            Set the name of the handler. This is optional but can be useful
            when examining the logging configuration.
            For files it defaults to ``'file:absolute_path'`` and for streams
            it defaults to ``'stream:stream_name'``.

        format
            Use the specified format string for the handler.

        time_zone_converter
            Log record timestamps are seconds since the epoch in the UTC time
            zone stored as floating point values. When the formatter inserts
            a timestamp via the %(asctime)s format substitution it calls a
            time zone converter on the timestamp which returns a
            time.struct_time value to pass to the time.strftime function along
            with the datefmt format conversion string. The time module provides
            two functions with this signature, time.localtime and time.gmtime,
            which perform a conversion to local time and UTC respectively.
            time.localtime is the default converter. Setting the time zone
            converter to time.gmtime is appropriate for date/time strings in
            UTC.
The time_zone_converter attribute may be any function with the correct signature. Or as a convenience you may also pass a string which will select either the time.localtime or the time.gmtime converter. The case insenstive string mappings are:: 'local' => time.localtime 'localtime' => time.localtime 'gmt' => time.gmtime 'gmtime' => time.gmtime 'utc' => time.gmtime datefmt Use the specified time.strftime date/time format when formatting a timestamp via the %(asctime)s format substitution. The timestamp is first converted using the time_zone_converter to either local or UTC level Set the handler logger level to the specified level. May be one of the following strings: 'debug', 'info', 'warn', 'warning', 'error', 'critical' or any of the logging level constants. Thus level='debug' is equivalent to level=logging.DEBUG. Defaults to self.default_level. File handler keys: ------------------ filemode Specifies the mode to open the file. Defaults to 'a' for append, use 'w' for write. permission Set the permission bits on the file (i.e. chmod). Must be a valid integer (e.g. 0660 for rw-rw----) user Set the user owning the file. May be either a numeric uid or a basestring with a user name in the passwd file. group Set the group associated with the file, May be either a numeric gid or a basestring with a group name in the groups file. Examples: --------- The following shows how to set two handlers, one for a file (ipa.log) at the debug log level and a second handler set to stdout (e.g. console) at the info log level. (One handler sets it level with a simple name, the other with a logging constant just to illustrate the flexibility) :: # Get a root logger log_mgr = LogManger('my_app') # Create the handlers log_mgr.create_log_handlers([dict(filename='my_app.log', level='info', user='root', group='root', permission=0600, time_zone_converter='utc', datefmt='%Y-%m-%dT%H:%M:%SZ', # ISO 8601 format='<%(levelname)s> [%(asctime)s] module=%(name)s "%(message)s"'), dict(stream=sys.stdout, level=logging.ERROR, format='%(levelname)s: %(message)s')]) # Create a logger for my_app.foo.bar foo_bar_log = log_mgr.get_logger('foo.bar') root_logger.info("Ready to process requests") foo_bar_log.error("something went boom") In the file my_app.log you would see:: <INFO> [2011-10-26T01:39:00Z] module=my_app "Ready to process requests" <ERROR> [2011-10-26T01:39:00Z] module=may_app.foo.bar "something went boom" On the console you would see:: ERROR: something went boom :parameters: configs Sequence of dicts (any iterable yielding a dict). Each dict creates one handler and contains the configuration parameters used to create that handler. logger If unspecified the handlers will be attached to the LogManager.root_logger, otherwise the handlers will be attached to the specified logger. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. :return: The list of created handers. ''' if logger is None: logger = self.root_logger handlers = [] # Iterate over handler configurations. for cfg in configs: # Type of handler? 
filename = cfg.get('filename') stream = cfg.get("stream") log_handler = cfg.get("log_handler") if filename: if "stream" in cfg: raise ValueError("both filename and stream are specified, must be one or the other, config: %s" % cfg) path = os.path.abspath(filename) filemode = cfg.get('filemode', 'a') handler = logging.FileHandler(path, filemode) # Set the handler name name = cfg.get("name") if name is None: name = 'file:%s' % (path) handler.name = name # Path should now exist, set ownership and permissions if requested. # Set uid, gid (e.g. chmod) uid = gid = None user = cfg.get('user') group = cfg.get('group') if user is not None: if isinstance(user, six.string_types): pw = pwd.getpwnam(user) uid = pw.pw_uid elif isinstance(user, int): uid = user else: raise TypeError("user (%s) is not int or basestring" % user) if group is not None: if isinstance(group, six.string_types): pw = pwd.getpwnam(group) gid = pw.pw_gid elif isinstance(group, int): gid = group else: raise TypeError("group (%s) is not int or basestring" % group) if uid is not None or gid is not None: if uid is None: uid = -1 if gid is None: gid = -1 os.chown(path, uid, gid) # Set file permissions (e.g. mode) permission = cfg.get('permission') if permission is not None: os.chmod(path, permission) elif stream: handler = logging.StreamHandler(stream) # Set the handler name name = cfg.get("name") if name is None: name = 'stream:%s' % (stream) handler.name = name elif log_handler: handler = log_handler else: raise ValueError( "neither file nor stream nor log_handler specified in " "config: %s" % cfg) # Add the handler handlers.append(handler) # Configure message formatting on the handler format = cfg.get("format", LOGGING_DEFAULT_FORMAT) datefmt = cfg.get("datefmt", None) formatter = logging.Formatter(format, datefmt) time_zone_converter = cfg.get('time_zone_converter', time.localtime) if isinstance(time_zone_converter, six.string_types): converter = {'local' : time.localtime, 'localtime' : time.localtime, 'gmt' : time.gmtime, 'gmtime' : time.gmtime, 'utc' : time.gmtime}.get(time_zone_converter.lower()) if converter is None: raise ValueError("invalid time_zone_converter name (%s)" % \ time_zone_converter) elif callable(time_zone_converter): converter = time_zone_converter else: raise ValueError("time_zone_converter must be basestring or callable, not %s" % \ type(time_zone_converter)) formatter.converter = converter handler.setFormatter(formatter) # Set the logging level level = cfg.get('level') if level is not None: try: level = parse_log_level(level) except Exception as e: print('could not set handler log level "%s" (%s)' % (level, e), file=sys.stderr) level = None if level is None: level = self.default_level handler.setLevel(level) for handler in handlers: if handler.name in self.handlers: raise ValueError('handler "%s" already exists' % handler.name) logger.addHandler(handler) self.handlers[handler.name] = handler self.apply_configuration(configure_state) return handlers def get_handler(self, handler_name): ''' Given a handler name return the handler object associated with it. :parameters: handler_name Name of the handler to look-up. :returns: The handler object associated with the handler name. ''' handler = self.handlers.get(handler_name) if handler is None: raise KeyError('handler "%s" is not defined' % handler_name) return handler def set_handler_level(self, handler_name, level, configure_state=None): ''' Given a handler name, set the handler's level, return previous level. 
:parameters: handler_name Name of the handler to look-up. level The new level for the handler. See `log_manager.parse_log_level()` for details on how the level can be specified. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. :returns: The handler's previous level ''' handler = self.get_handler(handler_name) level = parse_log_level(level) prev_level = handler.level handler.setLevel(level) self.apply_configuration(configure_state) return prev_level def get_loggers_with_handler(self, handler): ''' Given a handler return a list of loggers that hander is bound to. :parameters: handler The name of a handler or a handler object. :returns: List of loggers with the handler is bound to. ''' if isinstance(handler, six.string_types): handler = self.get_handler(handler) elif isinstance(handler, logging.Handler): if not handler in self.handlers.values(): raise ValueError('handler "%s" is not managed by this log manager' % \ logging_obj_str(handler)) else: raise TypeError('handler must be basestring or Handler object, got %s' % type(handler)) loggers = [] for logger in self.loggers.values(): if handler in logger.handlers: loggers.append(logger) return loggers def remove_handler(self, handler, logger=None, configure_state=None): ''' Remove the named handler. If logger is unspecified the handler will be removed from all managed loggers, otherwise it will be removed from only the specified logger. :parameters: handler The name of the handler to be removed or the handler object. logger If unspecified the handler is removed from all loggers, otherwise the handler is removed from only this logger. configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. ''' if isinstance(handler, six.string_types): handler = self.get_handler(handler) elif not isinstance(handler, logging.Handler): raise TypeError('handler must be basestring or Handler object, got %s' % type(handler)) handler_name = handler.name if handler_name is None: raise ValueError('handler "%s" does not have a name' % logging_obj_str(handler)) loggers = self.get_loggers_with_handler(handler) if logger is None: for logger in loggers: logger.removeHandler(handler) del self.handlers[handler_name] else: if not logger in loggers: raise ValueError('handler "%s" is not bound to logger "%s"' % \ (handler_name, logging_obj_str(logger))) logger.removeHandler(handler) if len(loggers) == 1: del self.handlers[handler_name] self.apply_configuration(configure_state) def apply_configuration(self, configure_state=None): ''' Using the log manager's internal configuration state apply the configuration to all the objects managed by the log manager. :parameters: configure_state If other than None update the log manger's configure_state variable to this object. Clients of the log manager can use configure_state to track the state of the log manager. ''' if configure_state is not None: self.configure_state = configure_state for logger in self.loggers.values(): self._set_configured_logger_level(logger) def get_configured_logger_level(self, name): ''' Given a logger name return it's level as defined by the `LogManager` configuration. 
:parameters: name logger name :returns: log level ''' level = self.default_level for regexp, config_level in self.logger_regexps: if re.search(regexp, name): level = config_level break level = parse_log_level(level) return level def get_logger_handlers(self, logger): ''' Return the set of unique handlers visible to this logger. :parameters: logger The logger whose visible and enabled handlers will be returned. :return: Set of handlers ''' handlers = set() while logger: for handler in logger.handlers: handlers.add(handler) if logger.propagate: logger = logger.parent else: logger = None return handlers def get_minimum_handler_level_for_logger(self, logger): ''' Return the minimum handler level of all the handlers the logger is exposed to. :parameters: logger The logger whose handlers will be examined. :return: The minimum of all the handler's levels. If no handlers are defined sys.maxsize will be returned. ''' handlers = self.get_logger_handlers(logger) min_level = get_minimum_level(handlers) return min_level def _set_configured_logger_level(self, logger): ''' Based on the current configuration maintained by the log manager set this logger's level. If the level specified for this logger by the configuration is less than the minimum level supported by the output handlers the logger is exposed to then adjust the logger's level higher to the minimum handler level. This is a performance optimization, no point in emitting a log message if no handlers will ever output it. :parameters: logger The logger whose level is being configured. :return: The level actually set on the logger. ''' level = self.get_configured_logger_level(logger.name) minimum_handler_level = self.get_minimum_handler_level_for_logger(logger) if level < minimum_handler_level: level = minimum_handler_level logger.setLevel(level) return level def get_logger(self, who, bind_logger_names=False): ''' Return the logger for an object or a name. If the logger already exists return the existing instance otherwise create the logger. The who parameter may be either a name or an object. Loggers are identified by a name but because loggers are usually bound to a class this method is optimized to handle that case. If who is an object: * The name object's module name (dot seperated) and the object's class name. * Optionally the logging output methods can be bound to the object if bind_logger_names is True. Otherwise if who is a basestring it is used as the logger name. In all instances the root_logger_name is prefixed to every logger created by the manager. :parameters: who If a basestring then use this as the logger name, prefixed with the root_logger_name. Otherwise who is treated as a class instance. The logger name is formed by prepending the root_logger_name to the module name and then appending the class name. All name components are dot seperated. Thus if the root_logger_name is 'my_app', the class is ParseFileConfig living in the config.parsers module the logger name will be: ``my_app.config.parsers.ParseFileConfig``. bind_logger_names If true the class instance will have the following bound to it: ``log``, ``debug()``, ``info()``, ``warning()``, ``error()``, ``exception()``, ``critical()``. Where log is the logger object and the others are the loggers output methods. This is a convenience which allows you emit logging messages directly, for example:: self.debug('%d names defined', self.num_names). :return: The logger matching the name indicated by who. 
        If the logger pre-existed, return that instance; otherwise create the
        named logger and return it.
        '''

        is_object = False
        if isinstance(who, six.string_types):
            obj_name = who
        else:
            is_object = True
            obj_name = '%s.%s' % (who.__module__, who.__class__.__name__)

        if obj_name == self.root_logger_name:
            logger_name = obj_name
        else:
            logger_name = self.root_logger_name + '.' + obj_name

        # If the logger is not in our cache then create and initialize it.
        logger = self.loggers.get(logger_name)
        if logger is None:
            logger = logging.getLogger(logger_name)
            self.loggers[logger_name] = logger
            self._set_configured_logger_level(logger)

            if bind_logger_names and is_object and getattr(who, '__log_manager', None) is None:
                setattr(who, '__log_manager', self)
                method = 'log'
                if hasattr(who, method):
                    raise ValueError('%s is already bound to %s' % (method, repr(who)))
                setattr(who, method, logger)

                for method in logger_method_names:
                    if hasattr(who, method):
                        raise ValueError('%s is already bound to %s' % (method, repr(who)))
                    setattr(who, method, getattr(logger, method))

        return logger<|fim▁end|>
Global levels are supported via the verbose and debug flags,
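The level-management idea in the sample above — never set a logger below the minimum level of the handlers it can actually reach — can be demonstrated with the standard logging module alone. The sketch below is illustrative and independent of the LogManager class; the helper names are mine, not part of the sample.

import logging
import sys

def visible_handlers(logger):
    # Walk up the logger hierarchy, honoring `propagate`, and collect
    # every handler a record emitted on `logger` could reach.
    handlers = set()
    while logger:
        handlers.update(logger.handlers)
        logger = logger.parent if logger.propagate else None
    return handlers

def effective_minimum_level(logger):
    # A logger set below this level only produces records that every
    # reachable handler would drop, so clamp it to the handler minimum.
    handlers = visible_handlers(logger)
    return min((h.level for h in handlers), default=sys.maxsize)

root = logging.getLogger('my_app')
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.INFO)
root.addHandler(handler)

child = logging.getLogger('my_app.config.parser')
# Requesting DEBUG is pointless here: no handler would emit it.
child.setLevel(max(logging.DEBUG, effective_minimum_level(child)))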
<|file_name|>g3Config.C<|end_file_name|><|fim▁begin|>//------------------------------------------------ // The Virtual Monte Carlo examples // Copyright (C) 2007 - 2014 Ivana Hrivnacova // All rights reserved. // // For the licensing terms see geant4_vmc/LICENSE. // Contact: [email protected] //------------------------------------------------- /// \file E01/g3Config.C /// \brief Configuration macro for Geant3 VMC for Example01 void Config() { /// The configuration function for Geant3 VMC for Example01 /// called during MC application initialization. <|fim▁hole|>}<|fim▁end|>
new TGeant3("C++ Interface to Geant3"); cout << "Geant3 has been created." << endl;
<|file_name|>hmm.py<|end_file_name|><|fim▁begin|>#author Matt Jacobsen ''' This program will learn and predict words and sentences using a Hierarchical Hidden Markov Model (HHMM). Implement a Baum-Welch algorithm (like EM?) to learn parameters Implement a Viterbi algorithm to learn structure. Implement a forward-backward algorithm (like BP) to do inference over the evidence. ''' ''' can do things like adjust sutff to be more like stuff #probabilities for a single word #states --> s t u f f #emisions --> s t u f ''' import sys, pprint as pp class HMM(object): numstates = 2 #prior probabilities pprob = [0.5, 0.5] #transition probabilities aprob = [[0.8, 0.2], [0.2, 0.8]] #emission probabilities bprob = [[0.6, 0.4], [0.4, 0.6]] bmap = { 'l': 0, 'r': 1 } def __init__(self): pass #compute forward probabilities def forward(self, O): pi = self.pprob a = self.aprob b = self.bprob bmap = self.bmap <|fim▁hole|> #initialization alpha = [[1.0]*len(O) for i in range(self.numstates)] for t in range(0, len(O)): for i in range(0, self.numstates): alpha[i][t] = pi[i] * b[i][bmap[O[t]]] #recursion for t in range(1, len(O)): for j in range(0, self.numstates): sum_i = 0.0 for i in range(0, self.numstates): sum_i += alpha[i][t-1] * a[i][j] alpha[j][t] = sum_i * b[j][bmap[O[t]]] #normalize alpha to avoid underflow for t in range(0, len(O)-1): for n in range(0,len(alpha)): alpha[n][t] = alpha[n][t] / sum(alpha[n]) return alpha #compute backward probabilities def backward(self, O): pi = self.pprob a = self.aprob b = self.bprob bmap = self.bmap #initialization beta = [[1.0]*len(O) for i in range(self.numstates)] #recursion for t in range(len(O)-2, -1, -1): for i in range(self.numstates-1, -1, -1): sum_i = 0.0 for j in range(self.numstates-1, -1, -1): sum_i += a[i][j] * beta[i][t+1] beta[i][t] = sum_i * b[i][bmap[O[t]]] #normalize alpha to avoid underflow for t in range(0, len(O)-1): for n in range(0,len(beta)): beta[n][t] = beta[n][t] / sum(beta[n]) return beta #compute smoother posterior probabilities def posterior(self, O): alpha = self.forward(O) beta = self.backward(O) p = [0.0]*self.numstates #dot product between alpha and beta for i in range(0, len(p)): p[i] = [0.0] * len(alpha[i]) for j in range(0, len(alpha[i])): p[i][j] += alpha[i][j] * beta[i][j] #normalize to be a distribution sum_p_i = [0.0]*len(p[0]) for i in range(0,len(p)): for j in range(0, len(p[i])): sum_p_i[j] += p[i][j] for i in range(0,len(p)): for j in range(0, len(p[i])): p[i][j] = p[i][j] / sum_p_i[j] return p #learn HMM parameters (emission and transition probabilities) from a set of observations def baumwelch(): pass #learn HMM structure from a set of observations def viterbi(): pass if __name__ == "__main__": if len(sys.argv) < 2: print 'missing test input' sys.exit() hmm = HMM() ''' print 'forward' pp.pprint(hmm.forward(sys.argv[1])) print 'backward' pp.pprint(hmm.backward(sys.argv[1])) ''' print 'posterior' pp.pprint(hmm.posterior(sys.argv[1]))<|fim▁end|>
#will be used to store alpha_t+1
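For comparison with the forward routine in the hmm.py sample: the textbook way to avoid underflow is to normalize alpha once per time step (rather than per state row) and keep the scaling factors, whose logs sum to the log-likelihood. A minimal sketch using plain lists like the sample; the function and variable names are mine.

def forward_scaled(O, pi, a, b, bmap):
    # alpha[t][i]: scaled probability of being in state i after
    # emitting O[0..t]; scales[t] restores the true likelihood.
    n = len(pi)
    alpha, scales = [], []
    for t, sym in enumerate(O):
        if t == 0:
            row = [pi[i] * b[i][bmap[sym]] for i in range(n)]
        else:
            prev = alpha[-1]
            row = [sum(prev[j] * a[j][i] for j in range(n)) * b[i][bmap[sym]]
                   for i in range(n)]
        s = sum(row) or 1.0
        alpha.append([x / s for x in row])
        scales.append(s)
    return alpha, scales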
<|file_name|>test_selenium.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals import time import pytest @pytest.mark.selenium def test_ui(selenium):<|fim▁hole|> selenium.browser.get(selenium.url('/download')) time.sleep(3)<|fim▁end|>
<|file_name|>app.po.ts<|end_file_name|><|fim▁begin|>import { browser, by, element } from 'protractor'; export class Angular2Page { navigateTo() { return browser.get('/'); } getParagraphText() {<|fim▁hole|> } }<|fim▁end|>
return element(by.css('app-root h1')).getText();
<|file_name|>cache.py<|end_file_name|><|fim▁begin|>import time import os import sys import hashlib import gc import shutil import platform import errno import logging try: import cPickle as pickle except: import pickle from parso._compatibility import FileNotFoundError LOG = logging.getLogger(__name__) _PICKLE_VERSION = 30 """ Version number (integer) for file system cache. Increment this number when there are any incompatible changes in the parser tree classes. For example, the following changes are regarded as incompatible. - A class name is changed. - A class is moved to another module. - A __slot__ of a class is changed. """ _VERSION_TAG = '%s-%s%s-%s' % ( platform.python_implementation(), sys.version_info[0], sys.version_info[1], _PICKLE_VERSION ) """ Short name for distinguish Python implementations and versions. It's like `sys.implementation.cache_tag` but for Python < 3.3 we generate something similar. See: http://docs.python.org/3/library/sys.html#sys.implementation """ def _get_default_cache_path(): if platform.system().lower() == 'windows': dir_ = os.path.join(os.getenv('LOCALAPPDATA') or '~', 'Parso', 'Parso') elif platform.system().lower() == 'darwin': dir_ = os.path.join('~', 'Library', 'Caches', 'Parso') else: dir_ = os.path.join(os.getenv('XDG_CACHE_HOME') or '~/.cache', 'parso') return os.path.expanduser(dir_) _default_cache_path = _get_default_cache_path() """ The path where the cache is stored. On Linux, this defaults to ``~/.cache/parso/``, on OS X to ``~/Library/Caches/Parso/`` and on Windows to ``%LOCALAPPDATA%\\Parso\\Parso\\``. On Linux, if environment variable ``$XDG_CACHE_HOME`` is set, ``$XDG_CACHE_HOME/parso`` is used instead of the default one. """ parser_cache = {} class _NodeCacheItem(object): def __init__(self, node, lines, change_time=None): self.node = node self.lines = lines if change_time is None: change_time = time.time() self.change_time = change_time def load_module(hashed_grammar, path, cache_path=None): """ Returns a module or None, if it fails. """ try: p_time = os.path.getmtime(path) except FileNotFoundError: return None try: module_cache_item = parser_cache[hashed_grammar][path] if p_time <= module_cache_item.change_time: return module_cache_item.node except KeyError: return _load_from_file_system(hashed_grammar, path, p_time, cache_path=cache_path) def _load_from_file_system(hashed_grammar, path, p_time, cache_path=None): cache_path = _get_hashed_path(hashed_grammar, path, cache_path=cache_path) try: try: if p_time > os.path.getmtime(cache_path): # Cache is outdated return None except OSError as e: if e.errno == errno.ENOENT: # In Python 2 instead of an IOError here we get an OSError. 
raise FileNotFoundError else: raise with open(cache_path, 'rb') as f: gc.disable() try: module_cache_item = pickle.load(f) finally: gc.enable() except FileNotFoundError: return None else: parser_cache.setdefault(hashed_grammar, {})[path] = module_cache_item LOG.debug('pickle loaded: %s', path) return module_cache_item.node <|fim▁hole|> except OSError: p_time = None pickling = False item = _NodeCacheItem(module, lines, p_time) parser_cache.setdefault(hashed_grammar, {})[path] = item if pickling and path is not None: _save_to_file_system(hashed_grammar, path, item, cache_path=cache_path) def _save_to_file_system(hashed_grammar, path, item, cache_path=None): with open(_get_hashed_path(hashed_grammar, path, cache_path=cache_path), 'wb') as f: pickle.dump(item, f, pickle.HIGHEST_PROTOCOL) def clear_cache(cache_path=None): if cache_path is None: cache_path = _default_cache_path shutil.rmtree(cache_path) parser_cache.clear() def _get_hashed_path(hashed_grammar, path, cache_path=None): directory = _get_cache_directory_path(cache_path=cache_path) file_hash = hashlib.sha256(path.encode("utf-8")).hexdigest() return os.path.join(directory, '%s-%s.pkl' % (hashed_grammar, file_hash)) def _get_cache_directory_path(cache_path=None): if cache_path is None: cache_path = _default_cache_path directory = os.path.join(cache_path, _VERSION_TAG) if not os.path.exists(directory): os.makedirs(directory) return directory<|fim▁end|>
def save_module(hashed_grammar, path, module, lines, pickling=True, cache_path=None): try: p_time = None if path is None else os.path.getmtime(path)
<|file_name|>voting_variant.py<|end_file_name|><|fim▁begin|>from app import db from sqlalchemy import Column, String, Integer, ForeignKey<|fim▁hole|> class VotingVariant(db.Model): __tablename__ = 'voting_variants' id = Column(Integer, primary_key=True) voting_id = Column(Integer, ForeignKey('votings.id')) title = Column(String(255)) description = Column(String(1000)) voting = db.relationship('Voting')<|fim▁end|>
<|file_name|>borrowck-overloaded-index-autoderef.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that we still see borrowck errors of various kinds when using // indexing and autoderef in combination. use std::ops::{Index, IndexMut}; struct Foo { x: isize, y: isize, } impl<'a> Index<&'a String> for Foo { type Output = isize; fn index(&self, z: &String) -> &isize { if *z == "x" { &self.x } else { &self.y } } } impl<'a> IndexMut<&'a String> for Foo { fn index_mut(&mut self, z: &String) -> &mut isize { if *z == "x" { &mut self.x } else { &mut self.y } } } fn test1(mut f: Box<Foo>, s: String) { let p = &mut f[&s]; let q = &f[&s]; //~ ERROR cannot borrow p.use_mut(); } fn test2(mut f: Box<Foo>, s: String) { let p = &mut f[&s]; let q = &mut f[&s]; //~ ERROR cannot borrow p.use_mut(); } struct Bar { foo: Foo } fn test3(mut f: Box<Bar>, s: String) { let p = &mut f.foo[&s]; let q = &mut f.foo[&s]; //~ ERROR cannot borrow p.use_mut(); } fn test4(mut f: Box<Bar>, s: String) { let p = &f.foo[&s]; let q = &f.foo[&s]; p.use_ref(); } fn test5(mut f: Box<Bar>, s: String) { let p = &f.foo[&s]; let q = &mut f.foo[&s]; //~ ERROR cannot borrow p.use_ref(); } fn test6(mut f: Box<Bar>, g: Foo, s: String) { let p = &f.foo[&s]; f.foo = g; //~ ERROR cannot assign p.use_ref(); } fn test7(mut f: Box<Bar>, g: Bar, s: String) { let p = &f.foo[&s]; *f = g; //~ ERROR cannot assign<|fim▁hole|> fn test8(mut f: Box<Bar>, g: Foo, s: String) { let p = &mut f.foo[&s]; f.foo = g; //~ ERROR cannot assign p.use_mut(); } fn test9(mut f: Box<Bar>, g: Bar, s: String) { let p = &mut f.foo[&s]; *f = g; //~ ERROR cannot assign p.use_mut(); } fn main() { } trait Fake { fn use_mut(&mut self) { } fn use_ref(&self) { } } impl<T> Fake for T { }<|fim▁end|>
p.use_ref(); }
<|file_name|>config.js<|end_file_name|><|fim▁begin|>System.config({ baseURL: "/", defaultJSExtensions: true, transpiler: "babel", babelOptions: { "optional": [ "runtime" ], "stage": 1 }, paths: { "github:*": "jspm_packages/github/*", "npm:*": "jspm_packages/npm/*" }, map: { "angular2": "npm:[email protected]", "babel": "npm:[email protected]", "babel-runtime": "npm:[email protected]", "clean-css": "npm:[email protected]", "core-js": "npm:[email protected]", "css": "github:systemjs/[email protected]", "reflect-metadata": "npm:[email protected]", "rxjs": "npm:[email protected]", "zone.js": "npm:[email protected]", "github:jspm/[email protected]": { "assert": "npm:[email protected]" }, "github:jspm/[email protected]": { "buffer": "npm:[email protected]" }, "github:jspm/[email protected]": { "constants-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "crypto-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "events": "npm:[email protected]" }, "github:jspm/[email protected]": { "Base64": "npm:[email protected]", "events": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "stream": "github:jspm/[email protected]", "url": "github:jspm/[email protected]", "util": "github:jspm/[email protected]" }, "github:jspm/[email protected]": { "https-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "os-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "path-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "process": "npm:[email protected]" }, "github:jspm/[email protected]": { "stream-browserify": "npm:[email protected]" }, "github:jspm/[email protected]": { "string_decoder": "npm:[email protected]" }, "github:jspm/[email protected]": { "url": "npm:[email protected]" }, "github:jspm/[email protected]": { "util": "npm:[email protected]" }, "github:jspm/[email protected]": { "vm-browserify": "npm:[email protected]" }, "npm:[email protected]": { "fs": "github:jspm/[email protected]", "module": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "crypto": "github:jspm/[email protected]", "es6-promise": "npm:[email protected]", "es6-shim": "npm:[email protected]", "process": "github:jspm/[email protected]", "reflect-metadata": "npm:[email protected]", "rxjs": "npm:[email protected]", "zone.js": "npm:[email protected]" }, "npm:[email protected]": { "assert": "github:jspm/[email protected]", "bn.js": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "minimalistic-assert": "npm:[email protected]", "vm": "github:jspm/[email protected]" }, "npm:[email protected]": { "util": "npm:[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "buffer-xor": "npm:[email protected]", "cipher-base": "npm:[email protected]", "create-hash": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "evp_bytestokey": "npm:[email protected]", "fs": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "browserify-aes": "npm:[email protected]", "browserify-des": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "evp_bytestokey": "npm:[email protected]"<|fim▁hole|> 
"npm:[email protected]": { "buffer": "github:jspm/[email protected]", "cipher-base": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "des.js": "npm:[email protected]", "inherits": "npm:[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "constants": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "randombytes": "npm:[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "browserify-rsa": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "create-hmac": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "elliptic": "npm:[email protected]", "inherits": "npm:[email protected]", "parse-asn1": "npm:[email protected]", "stream": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "base64-js": "npm:[email protected]", "child_process": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "ieee754": "npm:[email protected]", "isarray": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "stream": "github:jspm/[email protected]", "string_decoder": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "commander": "npm:[email protected]", "fs": "github:jspm/[email protected]", "http": "github:jspm/[email protected]", "https": "github:jspm/[email protected]", "os": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]", "source-map": "npm:[email protected]", "url": "github:jspm/[email protected]", "util": "github:jspm/[email protected]" }, "npm:[email protected]": { "child_process": "github:jspm/[email protected]", "events": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "graceful-readlink": "npm:[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "fs": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "elliptic": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "cipher-base": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "ripemd160": "npm:[email protected]", "sha.js": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "stream": "github:jspm/[email protected]" }, "npm:[email protected]": { "browserify-cipher": "npm:[email protected]", "browserify-sign": "npm:[email protected]", "create-ecdh": "npm:[email protected]", "create-hash": "npm:[email protected]", 
"create-hmac": "npm:[email protected]", "diffie-hellman": "npm:[email protected]", "inherits": "npm:[email protected]", "pbkdf2": "npm:[email protected]", "public-encrypt": "npm:[email protected]", "randombytes": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "minimalistic-assert": "npm:[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "miller-rabin": "npm:[email protected]", "randombytes": "npm:[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "brorand": "npm:[email protected]", "hash.js": "npm:[email protected]", "inherits": "npm:[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "crypto": "github:jspm/[email protected]" }, "npm:[email protected]": { "fs": "github:jspm/[email protected]" }, "npm:[email protected]": { "inherits": "npm:[email protected]" }, "npm:[email protected]": { "http": "github:jspm/[email protected]" }, "npm:[email protected]": { "util": "github:jspm/[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "brorand": "npm:[email protected]" }, "npm:[email protected]": { "os": "github:jspm/[email protected]" }, "npm:[email protected]": { "asn1.js": "npm:[email protected]", "browserify-aes": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "evp_bytestokey": "npm:[email protected]", "pbkdf2": "npm:[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "child_process": "github:jspm/[email protected]", "create-hmac": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "path": "github:jspm/[email protected]", "process": "github:jspm/[email protected]", "systemjs-json": "github:systemjs/[email protected]" }, "npm:[email protected]": { "assert": "github:jspm/[email protected]" }, "npm:[email protected]": { "bn.js": "npm:[email protected]", "browserify-rsa": "npm:[email protected]", "buffer": "github:jspm/[email protected]", "create-hash": "npm:[email protected]", "crypto": "github:jspm/[email protected]", "parse-asn1": "npm:[email protected]", "randombytes": "npm:[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "crypto": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "core-util-is": "npm:[email protected]", "events": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "isarray": "npm:[email protected]", "process": "github:jspm/[email protected]", "stream-browserify": "npm:[email protected]", "string_decoder": "npm:[email protected]" }, "npm:[email protected]": { "assert": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", 
"process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]", "fs": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "amdefine": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "events": "github:jspm/[email protected]", "inherits": "npm:[email protected]", "readable-stream": "npm:[email protected]" }, "npm:[email protected]": { "buffer": "github:jspm/[email protected]" }, "npm:[email protected]": { "assert": "github:jspm/[email protected]", "punycode": "npm:[email protected]", "querystring": "npm:[email protected]", "util": "github:jspm/[email protected]" }, "npm:[email protected]": { "inherits": "npm:[email protected]", "process": "github:jspm/[email protected]" }, "npm:[email protected]": { "indexof": "npm:[email protected]" }, "npm:[email protected]": { "process": "github:jspm/[email protected]" } } });<|fim▁end|>
},
<|file_name|>test_probe_count.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright (c) Suchakra Sharma <[email protected]> # Licensed under the Apache License, Version 2.0 (the "License") from bcc import BPF, _get_num_open_probes, TRACEFS import os import sys from unittest import main, TestCase class TestKprobeCnt(TestCase): def setUp(self): self.b = BPF(text=""" int wololo(void *ctx) { return 0; } """) self.b.attach_kprobe(event_re="^vfs_.*", fn_name="wololo") def test_attach1(self): actual_cnt = 0 with open("%s/available_filter_functions" % TRACEFS, "rb") as f: for line in f: if line.startswith(b"vfs_"): actual_cnt += 1 open_cnt = self.b.num_open_kprobes() self.assertEqual(actual_cnt, open_cnt)<|fim▁hole|> def tearDown(self): self.b.cleanup() class TestProbeGlobalCnt(TestCase): def setUp(self): self.b1 = BPF(text="""int count(void *ctx) { return 0; }""") self.b2 = BPF(text="""int count(void *ctx) { return 0; }""") def test_probe_quota(self): self.b1.attach_kprobe(event="schedule", fn_name="count") self.b2.attach_kprobe(event="submit_bio", fn_name="count") self.assertEqual(1, self.b1.num_open_kprobes()) self.assertEqual(1, self.b2.num_open_kprobes()) self.assertEqual(2, _get_num_open_probes()) self.b1.cleanup() self.b2.cleanup() self.assertEqual(0, _get_num_open_probes()) class TestAutoKprobe(TestCase): def setUp(self): self.b = BPF(text=""" int kprobe__schedule(void *ctx) { return 0; } int kretprobe__schedule(void *ctx) { return 0; } """) def test_count(self): self.assertEqual(2, self.b.num_open_kprobes()) def tearDown(self): self.b.cleanup() class TestProbeQuota(TestCase): def setUp(self): self.b = BPF(text="""int count(void *ctx) { return 0; }""") def test_probe_quota(self): with self.assertRaises(Exception): self.b.attach_kprobe(event_re=".*", fn_name="count") def test_uprobe_quota(self): with self.assertRaises(Exception): self.b.attach_uprobe(name="c", sym_re=".*", fn_name="count") def tearDown(self): self.b.cleanup() class TestProbeNotExist(TestCase): def setUp(self): self.b = BPF(text="""int count(void *ctx) { return 0; }""") def test_not_exist(self): with self.assertRaises(Exception): b.attach_kprobe(event="___doesnotexist", fn_name="count") def tearDown(self): self.b.cleanup() if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>""" WSGI config for mongobacked project. <|fim▁hole|>https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/ """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mongobacked.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()<|fim▁end|>
It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see
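Any WSGI server can mount the `application` callable defined above; for a quick local check the standard library's wsgiref is enough. The host and port here are arbitrary, and in production a real server such as gunicorn or uWSGI would import mongobacked.wsgi:application instead.

from wsgiref.simple_server import make_server

from mongobacked.wsgi import application

# Serve the Django WSGI app on localhost for a smoke test.
with make_server('127.0.0.1', 8000, application) as httpd:
    httpd.serve_forever()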
<|file_name|>server_errors.tsx<|end_file_name|><|fim▁begin|>/* * Copyright 2022 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {RestyleAttrs, RestyleComponent} from "jsx/mithril-component"; import m from "mithril"; import Stream from "mithril/stream"; import {Errors} from "models/mixins/errors"; import * as defaultStyles from "views/pages/pipelines/server_errors.scss"; type Styles = typeof defaultStyles; interface Attrs extends RestyleAttrs<Styles> { details: Stream<Errors>; message: Stream<string>; } export class ServerErrors extends RestyleComponent<Styles, Attrs> { css: Styles = defaultStyles; view(vnode: m.Vnode<Attrs>) { return <div class={this.css.errorResponse}> {this.message(vnode)} {this.details(vnode)} </div>; } message(vnode: m.Vnode<Attrs>) { const unmatched = vnode.attrs.details(); return <span> { (unmatched && unmatched.keys().length) ? vnode.attrs.message() + ": " : vnode.attrs.message() } </span>; } details(vnode: m.Vnode<Attrs>) { const unmatched = vnode.attrs.details(); if (unmatched && unmatched.keys().length) { return <ol> { unmatched.keys().map((key) => <li>{`${key}: ${unmatched.errorsForDisplay(key)}`}</li>) } </ol>; }<|fim▁hole|><|fim▁end|>
} }
<|file_name|>gidhistograms.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2014 Michael Krause ( http://krause-software.com/ ). # You are free to use this code under the MIT license: # http://opensource.org/licenses/MIT """Show some histograms for a directory a Xcode project files.""" from __future__ import print_function import sys import argparse from os.path import abspath, dirname, join import multiprocessing from collections import defaultdict, Counter import codecs # Set up the Python path so we find the xcodeprojer module in the parent directory # relative to this file. sys.path.insert(1, dirname(dirname(abspath(__file__)))) import utils import xcodeprojer from xcodeprojer import bytestr, unistr PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 if PY2: text_type = unicode binary_type = str else: text_type = str binary_type = bytes unichr = chr try: NARROW_BUILD = len(unichr(0x1f300)) == 2 except ValueError: NARROW_BUILD = True DEFAULT_FIRSTNAMES = 200 user_hash = xcodeprojer.UniqueXcodeIDGenerator.user_hash emojis = [] def here(): return dirname(abspath(__file__)) def rel(filename): return join(here(), filename) def write(s, end='\n'): s = unistr(s) + unistr(end) s = s.encode('utf-8') if PY2: sys.stdout.write(s) else: sys.stdout.buffer.write(s) def writeline(): write('\n') def uniord(s): """ord that works on surrogate pairs. """ try: return ord(s) except TypeError: pass if len(s) != 2: raise return 0x10000 + ((ord(s[0]) - 0xd800) << 10) | (ord(s[1]) - 0xdc00) def iterchars(text): if not NARROW_BUILD: for c in text: yield c idx = 0 while idx < len(text): c = text[idx] if ord(c) >= 0x100: # When we are running on a narrow Python build # we have to deal with surrogate pairs ourselves. if ((0xD800 < ord(c) <= 0xDBFF) and (idx < len(text) - 1) and (0xDC00 < ord(text[idx + 1]) <= 0xDFFF)): c = text[idx:idx+2] # Skip the other half of the lead and trail surrogate idx += 1 idx += 1 yield c def build_emoji_table(): with codecs.open(rel('emojis.txt'), 'r', encoding='utf-8') as f: text = f.read() uniques = set() for c in iterchars(text): # Only use unicode chars >= 0x100 (emoji etc.) if len(c) >= 2 or ord(c) >= 0x100: if c not in uniques: emojis.append(c) uniques.add(c) def print_emoji_table(): per_line = 32 for i in range(len(emojis)): if i % per_line == 0: write("%3d" % i, end=' ') write(emojis[i], end=' ') if i % per_line == per_line - 1: writeline() writeline() def print_emoji_histo(histo): all_users = set() for year, users in histo.items(): all_users.update(users) all_users = sorted(all_users) num_users = len(all_users) for year, users in histo.items(): chars = [str(year), ' '] for i in range(num_users): if all_users[i] in users: c = emojis[all_users[i]] + ' ' else: c = ' ' chars.append(c) write(''.join(chars)) write('\n') def print_histo(histo, utcoffset=0): maximum = max(histo.values()) max_display = 60 for k in sorted(histo): if utcoffset != 0: localhour = (k - utcoffset) % 24 else: localhour = k v = histo.get(localhour, 0) stars = '*' * int(v * max_display / float(maximum)) write("%3d %5d %s" % (k, v, stars)) writeline() def gidtable(filename): with open(filename, 'rb') as f: xcodeproj = f.read() root, parseinfo = xcodeprojer.parse(xcodeproj) if root is not None: unparser = xcodeprojer.Unparser(root) # We don't need the parse tree, only access to the gidcomments # that are built during the unparse. _ = unparser.unparse(root, projectname=xcodeprojer.projectname_for_path(filename)) gidcomments = unparser.gidcomments c = '.' 
else: gidcomments = {} c = 'X' sys.stdout.write(c) sys.stdout.flush() return filename, gidcomments def histogram(args, utcoffset=0): if args.emoji or args.emojitable: write("Please be patient when your computer is caching emoji fonts for you. This might take a minute.\n") build_emoji_table() if args.emojitable: print_emoji_table() return path = args.directory histo_year = Counter() histo_hour = Counter() users_per_year = defaultdict(set) pool = multiprocessing.Pool(initializer=utils.per_process_init) filenames = xcodeprojer.find_projectfiles(path) results = [] write("Looking for Xcode ids in project files...") sys.stdout.flush() for idx, filename in enumerate(filenames): results.append(pool.apply_async(gidtable, [filename])) if args.max_files is not None and idx + 1 >= args.max_files: break pool.close() try: for asyncresult in results: filename, gids = asyncresult.get() for gid in gids: fields = xcodeprojer.gidfields(gids, gid) refdate = fields['date'] dt = xcodeprojer.datetime_from_utc(refdate) histo_hour[dt.hour] += 1 year = dt.year if args.startyear <= year <= args.endyear: histo_year[year] += 1 users_per_year[year].add(fields['user']) except (KeyboardInterrupt, GeneratorExit): pool.terminate() finally: pool.join() writeline() write("At which hours are new Xcode ids created (UTC time offset: %d)" % args.utcoffset) print_histo(histo_hour, utcoffset=utcoffset) write("In which years were the Xcode ids created (we only look at %s-%s)" % (args.startyear, args.endyear)) print_histo(histo_year) write("Estimated number of users creating new Xcode ids by year") user_histo = {k: len(v) for (k, v) in users_per_year.items()} print_histo(user_histo) writeline() write("The following is a list of names that might be completely unrelated to the examined Xcode projects.") write("For something for tangible replace firstnames.txt with your own list.") writeline() max_firstnames_limited = print_names(args, users_per_year, emoji=args.emoji) if args.emoji: write("Looking for Xcode ids in project files...") print_emoji_histo(users_per_year) if max_firstnames_limited and args.max_firstnames is None: write("The number of first names to consider was limited to %d, this can be changed with --max-firstnames" % max_firstnames_limited) def print_names(args, users_per_year, emoji=False): userhashes = defaultdict(list) max_firstnames = args.max_firstnames if max_firstnames is None: max_firstnames = DEFAULT_FIRSTNAMES max_firstnames_limited = None with codecs.open(rel('firstnames.txt'), 'r', encoding='utf-8') as f: firstnames = f.read().splitlines() for idx, name in enumerate(firstnames): if idx >= max_firstnames: max_firstnames_limited = max_firstnames break userhashes[user_hash(name)].append(name) for year, hashes in sorted(users_per_year.items()): write(str(year), end=' ') for h in sorted(hashes): candidates = userhashes[h] if candidates: if emoji: symbol = emojis[h] + ' ' else: symbol = '' write(' (%s' % symbol + ' | '.join(candidates) + ')', end=' ') writeline() return max_firstnames_limited def main(): parser = argparse.ArgumentParser(description='Show some histograms for a directory a Xcode project files.') parser.add_argument('-u', '--utcoffset', type=int, default=-8, metavar='UTCOFFSET', help='UTC time offset, e.g. 
"-8" for California')<|fim▁hole|> parser.add_argument('--emoji', action='store_true', help='add emoji characters to userhashes') parser.add_argument('--emojitable', action='store_true', help='only print the emoji table') parser.add_argument('--profile', action='store_true', help='run everything through the profiler') parser.add_argument('directory', help='directory with Xcode project files') args = parser.parse_args() if args.profile: write('Profiling...') utils.profile('call_command(args, parser)', locals(), globals()) else: call_command(args) def call_command(args): histogram(args, utcoffset=args.utcoffset) if __name__ == '__main__': main()<|fim▁end|>
parser.add_argument('--startyear', type=int, default=2006) parser.add_argument('--endyear', type=int, default=2014) parser.add_argument('-n', '--max-files', action='store', type=int, default=None, help='maximum number of files to process') parser.add_argument('--max-firstnames', action='store', type=int, default=None, help='maximum number first names to consider')
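The star-histogram rendering used by print_histo in the sample boils down to one scaling rule: bar width proportional to count, with the largest count capped at a display maximum. Isolated into a standalone sketch (names are mine):

from collections import Counter

def render_histogram(counts, max_display=60):
    # Scale every bar so the largest count fills max_display columns.
    peak = max(counts.values())
    for key in sorted(counts):
        value = counts[key]
        stars = '*' * int(value * max_display / float(peak))
        print("%4s %5d %s" % (key, value, stars))

render_histogram(Counter({2011: 4, 2012: 9, 2013: 6}))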
<|file_name|>ConfigCommandBase.java<|end_file_name|><|fim▁begin|><|fim▁hole|> public abstract class ConfigCommandBase<T extends VdcActionParametersBase> extends CommandBase<T> { protected ConfigCommandBase(T parameters) { super(parameters); } }<|fim▁end|>
package org.ovirt.engine.core.bll; import org.ovirt.engine.core.common.action.VdcActionParametersBase;
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python ## globals primes_set = set() primes_list = [] def is_prime(n): limit = int(round(sqrt(n))) i = 2 while True: if i > limit: return True if n % i == 0: return False def find_prime_permutations(n): "find the prime permutations including n" global primes_set assert n >= 1000 and n <= 9999 perm_set = set() s = str(n) for i in xrange(4): for j in xrange(4): if j == i: continue for k in xrange(4): if k == i or k == j: continue for l in xrange(4): if l == i or l == j or l == k: continue s2 = s[i] + s[j] + s[k] + s[l] n2 = int(s2) if n2 in primes_set: perm_set.add(n2) return perm_set <|fim▁hole|> return None for i in xrange(1, len(l)-1): # not either end for j in xrange(0, i): n = l[i]*2 - l[j] if n in _set and n != l[i] and n != l[j]: return (l[j], l[i], n) return None if __name__ == '__main__': if len(primes_set) == 0: # if not initialized for i in xrange(1001, 9999+1): if is_prime(i): primes_set.add(i) primes_list.append(i) solutions = set() for p in primes_list: prime_perm_set = find_prime_permutations(p) result = find_arith_seq(prime_perm_set) if result is not None: solutions.add(result) print repr(solutions)<|fim▁end|>
def find_arith_seq(_set): l = sorted(list(_set)) if len(l) < 3:
<|file_name|>borrowck-univariant-enum.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::cell::Cell; use std::gc::GC; <|fim▁hole|>pub fn main() { // Test that borrowck treats enums with a single variant // specially. let x = box(GC) Cell::new(5); let y = box(GC) Cell::new(newvar(3)); let z = match y.get() { newvar(b) => { x.set(x.get() + 1); x.get() * b } }; assert_eq!(z, 18); }<|fim▁end|>
enum newtype { newvar(int) }
<|file_name|>__SharedActionCreators.js<|end_file_name|><|fim▁begin|>"use strict"; import assert from "assert"; import sinon from "sinon"; import testPlease from "../helpers/testPlease"; import { genericDouble } from "../helpers/doubles"; import * as JobsActionCreators from "../../../src/js/actions/JobsActionCreators"; import * as JobItemsActionCreators from "../../../src/js/actions/JobItemsActionCreators"; import * as ProductActionCreators from "../../../src/js/actions/ProductActionCreators"; import * as SharedActionCreators from "../../../src/js/actions/SharedActionCreators"; import * as JobsAPI from "../../../src/js/api/JobsAPI"; const toTest = [ { fn: "startLoading", type: "IS_LOADING", give: [], desc: "no data" }, { fn: "changeItem", type: "CHANGE_SINGLE_JOB_ITEM", give: [{item: "newItem"}], want: {item: "newItem"},<|fim▁hole|> { fn: "changeDetails", type: "CHANGE_SINGLE_JOB_DETAILS", give: [{details: "newDetails"}], want: {details: "newDetails"}, desc: "a new job details" }, { fn: "sortBy", type: "SORT_ONE", give: ["doctor"], want: "doctor", desc: "a sort string" }, { fn: "setCurrentY", type: "SET_CURRENT_Y", give: [158], want: 158, desc: "a new y position" }, { fn: "setTableHeight", type: "SET_TABLE_HEIGHT", give: [450], want: 450, desc: "a new table height" } ]; describe("SharedActionCreators", () => { describe(".externalSortBy", () => { let result, sortStub, jSpy, jiSpy, pSpy; before(() => { sortStub = sinon.stub(JobsAPI, "getSortedThings", (r, f, d) => { result = [r, f, d]; }); jSpy = sinon.spy(JobsActionCreators, "sortBy"); jiSpy = sinon.spy(JobItemsActionCreators, "sortBy"); pSpy = sinon.spy(ProductActionCreators, "sortBy"); }); after(() => { [sortStub, jSpy, jiSpy, pSpy].forEach(e => e.restore()); }); it("#calls the relevant sortBy action creator", () => { SharedActionCreators.externalSortBy("jobs", "dawg", false); assert(jSpy.called); assert.deepEqual(jSpy.firstCall.args, ["dawg"]); SharedActionCreators.externalSortBy("items", "caht", true); assert(jSpy.called); assert.deepEqual(jiSpy.firstCall.args, ["caht"]); SharedActionCreators.externalSortBy("products", "forks", false); assert(pSpy.called); assert.deepEqual(pSpy.firstCall.args, ["forks"]); }); it("#calls JobsAPI.getSortedThings with the endpoint," + " sort field and sort direction, defaulting to false", () => { const args1 = ["items", "chickens", false]; const args2 = ["jobs", "frogs", true]; SharedActionCreators.externalSortBy(args1[0], args1[1], args1[2]); assert.deepEqual(result, [args1[0], args1[1], !args1[2]]); SharedActionCreators.externalSortBy(args2[0], args2[1], args2[2]); assert.deepEqual(result, [args2[0], args2[1], !args2[2]]); }); }); describe("API callers", () => { let dubbel; let result = {}; before(() => { dubbel = genericDouble(JobsAPI, result); }); after(() => dubbel()); it(".getSelections calls JobsAPI.getSelections", () => { SharedActionCreators.getSelections(); assert.equal(result.getSelections, "calledWithNoArgs"); }); it(".getAllProducts calls JobsAPI.getAllProducts", () => { SharedActionCreators.getAllProducts(); assert.equal(result.getAllProducts, "calledWithNoArgs"); }); it(".getUserProfile calls JobsAPI.getUserProfile", () => { SharedActionCreators.getUserProfile(); assert.equal(result.getUserProfile, "calledWithNoArgs"); }); it(".saveDetails calls JobsAPI.saveDetails with an id and details", () => { SharedActionCreators.saveDetails(123, "hello hi"); assert.deepEqual(result.saveDetails, [123, "hello hi"]); }); it(".saveItem calls JobsAPI.saveItem with an id and item", () => { 
SharedActionCreators.saveItem(123, "hello hi"); assert.deepEqual(result.saveItem, [123, "hello hi"]); }); it(".createItem calls JobsAPI.createItem with an id and blueprint", () => { SharedActionCreators.createItem(123, {name: "hello hi"}); assert.deepEqual(result.createSingleJobItem, [123, {name: "hello hi"}]); }); it(".deleteItem calls JobsAPI.deleteItem with any arg and an immutable object", () => { SharedActionCreators.deleteItem(null, { get(thing) { return thing; } }); assert.deepEqual(result.deleteSingleItem, "item_id"); }); }); testPlease(toTest, SharedActionCreators); });<|fim▁end|>
desc: "a new job item" },
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */<|fim▁hole|>#![feature(path, io, env)] use std::env; use std::old_path::Path; use std::old_io::process::{Command, ProcessExit, StdioContainer}; use std::old_io::File; fn main() { let python = if Command::new("python2.7").arg("--version").status() == Ok(ProcessExit::ExitStatus(0)) { "python2.7" } else { "python" }; let style = Path::new(file!()).dir_path(); let mako = style.join("Mako-0.9.1.zip"); let template = style.join("properties.mako.rs"); let result = Command::new(python) .env("PYTHONPATH", mako.as_str().unwrap()) .env("TEMPLATE", template.as_str().unwrap()) .arg("-c") .arg("from os import environ; from mako.template import Template; print(Template(filename=environ['TEMPLATE']).render())") .stderr(StdioContainer::InheritFd(2)) .output() .unwrap(); assert_eq!(result.status, ProcessExit::ExitStatus(0)); let out = Path::new(env::var_string("OUT_DIR").unwrap()); File::create(&out.join("properties.rs")).unwrap().write_all(&*result.output).unwrap(); }<|fim▁end|>
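// Editor's note (sketch): the build script above shells out to CPython with
// Mako on PYTHONPATH and writes the rendered template to OUT_DIR; the shell
// paraphrase below is an illustration only, with paths assumed relative to
// the style/ directory:
//   PYTHONPATH=Mako-0.9.1.zip TEMPLATE=properties.mako.rs \
//     python -c "from os import environ; from mako.template import Template; print(Template(filename=environ['TEMPLATE']).render())" \
//     > "$OUT_DIR/properties.rs"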
<|file_name|>0_1_initial.py<|end_file_name|><|fim▁begin|># Copyright 2014 OpenStack Foundation. # Copyright 2014 Intel Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Icehouse Initial Revision ID: 0_1 Revises: None Create Date: 2014-02-19 17:23:47.705197 """ # revision identifiers, used by Alembic. revision = '0_1' down_revision = None import uuid from alembic import op import sqlalchemy as sa from sqlalchemy.dialects.mysql import MEDIUMTEXT def _generate_unicode_uuid(): return unicode(str(uuid.uuid4())) def MediumText(): return sa.Text().with_variant(MEDIUMTEXT(), 'mysql') def _id_column(): return sa.Column('id', sa.String(36), primary_key=True, default=_generate_unicode_uuid) def upgrade(): op.create_table( 'computehosts', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), _id_column(), sa.Column('vcpus', sa.Integer(), nullable=False), sa.Column('cpu_info', MediumText(), nullable=False), sa.Column('hypervisor_type', MediumText(), nullable=False), sa.Column('hypervisor_version', sa.Integer(), nullable=False), sa.Column('hypervisor_hostname', sa.String(length=255), nullable=True), sa.Column('memory_mb', sa.Integer(), nullable=False), sa.Column('local_gb', sa.Integer(), nullable=False), sa.Column('status', sa.String(length=13)), sa.PrimaryKeyConstraint('id')) op.create_table( 'leases', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), _id_column(), sa.Column('name', sa.String(length=80), nullable=False), sa.Column('user_id', sa.String(length=255), nullable=True),<|fim▁hole|> sa.Column('tenant_id', sa.String(length=255), nullable=True), sa.Column('start_date', sa.DateTime(), nullable=False), sa.Column('end_date', sa.DateTime(), nullable=False), sa.Column('trust_id', sa.String(length=36)), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name')) op.create_table( 'reservations', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), _id_column(), sa.Column('lease_id', sa.String(length=36), nullable=False), sa.Column('resource_id', sa.String(length=36)), sa.Column('resource_type', sa.String(length=66)), sa.Column('status', sa.String(length=13)), sa.ForeignKeyConstraint(['lease_id'], ['leases.id'], ), sa.PrimaryKeyConstraint('id')) op.create_table( 'computehost_extra_capabilities', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), _id_column(), sa.Column('computehost_id', sa.String(length=36), nullable=True), sa.Column('capability_name', sa.String(length=64), nullable=False), sa.Column('capability_value', MediumText(), nullable=False), sa.ForeignKeyConstraint(['computehost_id'], ['computehosts.id'], ), sa.PrimaryKeyConstraint('id')) op.create_table( 'events', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), _id_column(), sa.Column('lease_id', sa.String(length=36), nullable=True), 
sa.Column('event_type', sa.String(length=66)), sa.Column('time', sa.DateTime()), sa.Column('status', sa.String(length=13)), sa.ForeignKeyConstraint(['lease_id'], ['leases.id'], ), sa.PrimaryKeyConstraint('id')) op.create_table( 'computehost_allocations', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), _id_column(), sa.Column('compute_host_id', sa.String(length=36), nullable=True), sa.Column('reservation_id', sa.String(length=36), nullable=True), sa.ForeignKeyConstraint(['compute_host_id'], ['computehosts.id'], ), sa.ForeignKeyConstraint(['reservation_id'], ['reservations.id'], ), sa.PrimaryKeyConstraint('id')) op.create_table( 'computehost_reservations', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), _id_column(), sa.Column('reservation_id', sa.String(length=36), nullable=True), sa.Column('resource_properties', MediumText()), sa.Column('count_range', sa.String(length=36)), sa.Column('hypervisor_properties', MediumText()), sa.Column('status', sa.String(length=13)), sa.ForeignKeyConstraint(['reservation_id'], ['reservations.id'], ), sa.PrimaryKeyConstraint('id')) def downgrade(): op.drop_table('computehost_extra_capabilities') op.drop_table('computehost_allocations') op.drop_table('computehost_reservations') op.drop_table('computehosts') op.drop_table('reservations') op.drop_table('events') op.drop_table('leases')<|fim▁end|>
<|file_name|>taskcluster-spark-dogfood.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#!/usr/bin/env python import os.path config = { "default_vcs": "tc-vcs", "default_actions": [ 'checkout-sources', 'build', 'build-symbols', 'make-updates', 'prep-upload', 'submit-to-balrog' ], "balrog_credentials_file": "balrog_credentials", "nightly_build": True, "env": { "GAIA_OPTIMIZE": "1", "B2G_UPDATER": "1", "LIGHTSABER": "1", "DOGFOOD": "1", "B2G_UPDATE_CHANNEL": "dogfood", "BOWER_FLAGS": "--allow-root", "B2G_PATH": "%(work_dir)s", "GAIA_DISTRIBUTION_DIR": "%(work_dir)s/gaia/distros/spark", "WGET_OPTS": "-c -q" }, "is_automation": True, "repo_remote_mappings": { 'https://android.googlesource.com/': 'https://git.mozilla.org/external/aosp', 'git://codeaurora.org/': 'https://git.mozilla.org/external/caf', 'https://git.mozilla.org/b2g': 'https://git.mozilla.org/b2g', 'git://github.com/mozilla-b2g/': 'https://git.mozilla.org/b2g', 'git://github.com/mozilla/': 'https://git.mozilla.org/b2g', 'https://git.mozilla.org/releases': 'https://git.mozilla.org/releases', 'http://android.git.linaro.org/git-ro/': 'https://git.mozilla.org/external/linaro', 'git://github.com/apitrace/': 'https://git.mozilla.org/external/apitrace', }, }<|fim▁end|>
<|file_name|>reducer.spec.js<|end_file_name|><|fim▁begin|>import reducer, { addFlag, fetchAll } from '../reducer' import fetchAllSuccess from './__fixtures__/fetch_all_success' const INITIAL_STATE = reducer(undefined, {}) test('has correct defaults', () => { snapshot(INITIAL_STATE) }) <|fim▁hole|>test('fetch all', () => { snapshotReducer(reducer, INITIAL_STATE, fetchAll(fetchAllSuccess)) }) test('add flag', () => { snapshotReducer(reducer, INITIAL_STATE, addFlag(42), addFlag(43), addFlag(44)) })<|fim▁end|>
<|file_name|>TotalVelocity.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014 Pierre de Buyl # Copyright (C) 2012,2013 # Max Planck Institute for Polymer Research # Copyright (C) 2008,2009,2010,2011 # Max-Planck-Institute for Polymer Research & Fraunhofer SCAI # # This file is part of ESPResSo++. # # ESPResSo++ is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ESPResSo++ is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. r""" ************************************* **espressopp.analysis.TotalVelocity** ************************************* .. function:: espressopp.analysis.TotalVelocity(system) :param system: The system object. :type system: espressopp.System .. function:: espressopp.analysis.TotalVelocity.compute() Compute the total velocity of the system. :rtype: float .. function:: espressopp.analysis.TotalVelocity.reset() Subtract the total velocity of the system from every particle. Examples --------- Reset the velocity ++++++++++++++++++++ >>> total_velocity = espressopp.analysis.TotalVelocity(system) >>> total_velocity.reset() Extension to integrator ++++++++++++++++++++++++++++++++++++++++++++ This extension can also be attached to integrator and run `reset()` every `n-th` steps. >>> total_velocity = espressopp.analysis.TotalVelocity(system) >>> ext_remove_com = espressopp.analysis.ExtAnalyze(total_velocity, 10) >>> integrator.addExtension(ext_remove_com)<|fim▁hole|>from espressopp import pmi from espressopp.analysis.Observable import * from _espressopp import analysis_TotalVelocity class TotalVelocityLocal(ObservableLocal, analysis_TotalVelocity): def __init__(self, system): if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): cxxinit(self, analysis_TotalVelocity, system) def compute(self): return self.cxxclass.compute(self) def reset(self): return self.cxxclass.reset(self) if pmi.isController : class TotalVelocity(Observable): __metaclass__ = pmi.Proxy pmiproxydefs = dict( cls = 'espressopp.analysis.TotalVelocityLocal', pmicall = [ "compute", "reset" ], pmiproperty = ["v"] )<|fim▁end|>
""" from espressopp.esutil import cxxinit
<|file_name|>RasterizerTaskSVGConverterController.java<|end_file_name|><|fim▁begin|>/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.tools.ant.taskdefs.optional; // -- Batik classes ---------------------------------------------------------- import org.apache.batik.transcoder.Transcoder; import org.apache.batik.apps.rasterizer.SVGConverterController; import org.apache.batik.apps.rasterizer.SVGConverterSource; // -- Ant classes ------------------------------------------------------------ import org.apache.tools.ant.Task; // -- Java SDK classes ------------------------------------------------------- import java.io.File; import java.util.Map; import java.util.List; /** * Implements simple controller for the <code>SVGConverter</code> operation. * * <p>This is almost the same as the * {@link org.apache.batik.apps.rasterizer.DefaultSVGConverterController DefaultSVGConverterController} * except this produces error message when the conversion fails.</p> * * <p>See {@link SVGConverterController} for the method documentation.</p> * * @see SVGConverterController SVGConverterController * @see org.apache.batik.apps.rasterizer.DefaultSVGConverterController DefaultSVGConverterController * * @author <a href="mailto:[email protected]">Henri Ruini</a> * @version $Id: RasterizerTaskSVGConverterController.java 479617 2006-11-27 13:43:51Z dvholten $ */ public class RasterizerTaskSVGConverterController implements SVGConverterController { // -- Variables ---------------------------------------------------------- /** Ant task that is used to log messages. */ protected Task executingTask = null; // -- Constructors ------------------------------------------------------- /** * Don't allow public usage. */ protected RasterizerTaskSVGConverterController() { } /** * Sets the given Ant task to receive log messages. * * @param task Ant task. The value can be <code>null</code> when log messages won't be written. */ public RasterizerTaskSVGConverterController(Task task) { executingTask = task; } // -- Public interface --------------------------------------------------- public boolean proceedWithComputedTask(Transcoder transcoder, Map hints, List sources, List dest){ return true; } public boolean proceedWithSourceTranscoding(SVGConverterSource source, File dest) { return true; } public boolean proceedOnSourceTranscodingFailure(SVGConverterSource source, File dest, String errorCode){ if(executingTask != null) { executingTask.log("Unable to rasterize image '" + source.getName() + "' to '" + dest.getAbsolutePath() + "': " + errorCode); }<|fim▁hole|> public void onSourceTranscodingSuccess(SVGConverterSource source, File dest){ } }<|fim▁end|>
return true; }
<|file_name|>sleepy.py<|end_file_name|><|fim▁begin|># Speak.activity # A simple front end to the espeak text-to-speech engine on the XO laptop # http://wiki.laptop.org/go/Speak # # Copyright (C) 2008 Joshua Minor # Copyright (C) 2014 Walter Bender # This file is part of Speak.activity # # Parts of Speak.activity are based on code from Measure.activity # Copyright (C) 2007 Arjun Sarwal - [email protected] # # Speak.activity is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Speak.activity is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Speak.activity. If not, see <http://www.gnu.org/licenses/>. from gi.repository import Gdk from gi.repository import GdkPixbuf from eye import Eye from utils import svg_str_to_pixbuf <|fim▁hole|> Eye.__init__(self, fill_color) self._pixbuf = svg_str_to_pixbuf(eye_svg()) def draw(self, widget, cr): bounds = self.get_allocation() # background cr.set_source_rgba(*self.fill_color.get_rgba()) cr.rectangle(0, 0, bounds.width, bounds.height) cr.fill() w = h = min(bounds.width, bounds.height) x = int((bounds.width - w) // 2) y = int((bounds.height - h) // 2) pixbuf = self._pixbuf.scale_simple(w, h, GdkPixbuf.InterpType.BILINEAR) cr.translate(x + w / 2., y + h / 2.) cr.translate(-x - w / 2., -y - h / 2.) Gdk.cairo_set_source_pixbuf(cr, pixbuf, x, y) cr.rectangle(x, y, w, h) cr.fill() return True def eye_svg(): return \ '<?xml version="1.0" encoding="UTF-8" standalone="no"?>\n' + \ '<svg\n' + \ ' xmlns:svg="http://www.w3.org/2000/svg"\n' + \ ' xmlns="http://www.w3.org/2000/svg"\n' + \ ' version="1.1"\n' + \ ' width="300"\n' + \ ' height="300">\n' + \ ' <path\n' + \ ' d="m 260.26893,151.09803 c -6.07398,14.55176 -15.05894,27.89881 -26.27797,39.03563 -11.21904,11.13683 -24.66333,20.05466 -39.32004,26.08168 -14.65671,6.02702 -30.51431,9.15849 -46.37814,9.15849 -15.86384,0 -31.72144,-3.13147 -46.37815,-9.15849 C 87.257925,210.18832 73.813631,201.27049 62.594594,190.13366 51.375557,178.99684 42.3906,165.64979 36.316616,151.09803"\n' + \ ' style="fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:13.18636799;stroke-linecap:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none" />\n' + \ '</svg>\n'<|fim▁end|>
class Sleepy(Eye): def __init__(self, fill_color):
<|file_name|>Content_var_Choice110.java<|end_file_name|><|fim▁begin|>package tmp.generated_xhtml;<|fim▁hole|>import cide.gast.*; import cide.gparser.*; import cide.greferences.*; import java.util.*; public class Content_var_Choice110 extends Content_var_Choice1 { public Content_var_Choice110(Element_i element_i, Token firstToken, Token lastToken) { super(new Property[] { new PropertyOne<Element_i>("element_i", element_i) }, firstToken, lastToken); } public Content_var_Choice110(Property[] properties, IToken firstToken, IToken lastToken) { super(properties,firstToken,lastToken); } public ASTNode deepCopy() { return new Content_var_Choice110(cloneProperties(),firstToken,lastToken); } public Element_i getElement_i() { return ((PropertyOne<Element_i>)getProperty("element_i")).getValue(); } }<|fim▁end|>
<|file_name|>membership.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This software may be used and distributed according to the terms of the * GNU General Public License version 2. */ use anyhow::Result; use async_trait::async_trait; use std::panic::RefUnwindSafe; use std::sync::Arc; use crate::MononokeIdentitySet; pub type ArcMembershipChecker = Arc<dyn MembershipChecker + Send + Sync + RefUnwindSafe + 'static>; pub type BoxMembershipChecker = Box<dyn MembershipChecker + Send + Sync + RefUnwindSafe + 'static>; <|fim▁hole|>pub trait MembershipChecker { async fn is_member(&self, identities: &MononokeIdentitySet) -> Result<bool>; } pub struct MembershipCheckerBuilder {} impl MembershipCheckerBuilder { pub fn always_member() -> BoxMembershipChecker { Box::new(AlwaysMember {}) } pub fn never_member() -> BoxMembershipChecker { Box::new(NeverMember {}) } pub fn allowlist_checker(allowlist: MononokeIdentitySet) -> BoxMembershipChecker { Box::new(AllowlistChecker { allowlist }) } } struct AlwaysMember {} #[async_trait] impl MembershipChecker for AlwaysMember { async fn is_member(&self, _identities: &MononokeIdentitySet) -> Result<bool> { Ok(true) } } struct NeverMember {} #[async_trait] impl MembershipChecker for NeverMember { async fn is_member(&self, _identities: &MononokeIdentitySet) -> Result<bool> { Ok(false) } } struct AllowlistChecker { allowlist: MononokeIdentitySet, } #[async_trait] impl MembershipChecker for AllowlistChecker { async fn is_member(&self, identities: &MononokeIdentitySet) -> Result<bool> { Ok(!self.allowlist.is_disjoint(identities)) } }<|fim▁end|>
#[async_trait]
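// Editor's note: AllowlistChecker::is_member above answers "do the identity
// sets intersect?" via !allowlist.is_disjoint(identities), so an empty
// allowlist never matches anyone. Assuming MononokeIdentitySet behaves like
// an ordinary set type:
//   allowlist = {A, B}, identities = {B, C}  -> Ok(true)
//   allowlist = {A, B}, identities = {C}     -> Ok(false)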
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod fmt; mod key; mod strfmt; mod test_trait; use super::FmtError; #[test] fn test_error() {<|fim▁hole|> // just make sure this compiles mostly let err = FmtError::Invalid("fmt error".to_string()); let v = err.to_string(); println!("{}", v); }<|fim▁end|>
<|file_name|>cmd_whowas.py<|end_file_name|><|fim▁begin|>from twisted.plugin import IPlugin from twisted.words.protocols import irc from txircd.config import ConfigValidationError from txircd.module_interface import Command, ICommand, IModuleData, ModuleData from txircd.utils import durationToSeconds, ipAddressToShow, ircLower, now from zope.interface import implementer from datetime import datetime, timedelta from typing import Any, Callable, Dict, List, Optional, Tuple irc.RPL_WHOWASIP = "379" @implementer(IPlugin, IModuleData, ICommand) class WhowasCommand(ModuleData, Command): name = "WhowasCommand" core = True def actions(self) -> List[Tuple[str, int, Callable]]: return [ ("quit", 10, self.addUserToWhowas), ("remotequit", 10, self.addUserToWhowas), ("localquit", 10, self.addUserToWhowas) ] def userCommands(self) -> List[Tuple[str, int, Command]]: return [ ("WHOWAS", 1, self) ] <|fim▁hole|> def load(self) -> None: if "whowas" not in self.ircd.storage: self.ircd.storage["whowas"] = {} def verifyConfig(self, config: Dict[str, Any]) -> None: if "whowas_duration" in config and not isinstance(config["whowas_duration"], str) and not isinstance(config["whowas_duration"], int): raise ConfigValidationError("whowas_duration", "value must be an integer or a duration string") if "whowas_max_entries" in config and (not isinstance(config["whowas_max_entries"], int) or config["whowas_max_entries"] < 0): raise ConfigValidationError("whowas_max_entries", "invalid number") def removeOldEntries(self, whowasEntries: List[Dict[str, Any]]) -> List[Dict[str, Any]]: expireDuration = durationToSeconds(self.ircd.config.get("whowas_duration", "1d")) maxCount = self.ircd.config.get("whowas_max_entries", 10) while whowasEntries and len(whowasEntries) > maxCount: whowasEntries.pop(0) expireDifference = timedelta(seconds=expireDuration) expireTime = now() - expireDifference while whowasEntries and whowasEntries[0]["when"] < expireTime: whowasEntries.pop(0) return whowasEntries def addUserToWhowas(self, user: "IRCUser", reason: str, fromServer: "IRCServer" = None) -> None: if not user.isRegistered(): # user never registered a nick, so no whowas entry to add return lowerNick = ircLower(user.nick) allWhowas = self.ircd.storage["whowas"] if lowerNick in allWhowas: whowasEntries = allWhowas[lowerNick] else: whowasEntries = [] serverName = self.ircd.name if user.uuid[:3] != self.ircd.serverID: serverName = self.ircd.servers[user.uuid[:3]].name whowasEntries.append({ "nick": user.nick, "ident": user.ident, "host": user.host(), "realhost": user.realHost, "ip": ipAddressToShow(user.ip), "gecos": user.gecos, "server": serverName, "when": now() }) whowasEntries = self.removeOldEntries(whowasEntries) if whowasEntries: allWhowas[lowerNick] = whowasEntries elif lowerNick in allWhowas: del allWhowas[lowerNick] def parseParams(self, user: "IRCUser", params: List[str], prefix: str, tags: Dict[str, Optional[str]]) -> Optional[Dict[Any, Any]]: if not params: user.sendSingleError("WhowasCmd", irc.ERR_NEEDMOREPARAMS, "WHOWAS", "Not enough parameters") return None lowerParam = ircLower(params[0]) if lowerParam not in self.ircd.storage["whowas"]: user.sendSingleError("WhowasNick", irc.ERR_WASNOSUCHNICK, params[0], "There was no such nickname") return None return { "nick": lowerParam, "param": params[0] } def execute(self, user: "IRCUser", data: Dict[Any, Any]) -> bool: nick = data["nick"] allWhowas = self.ircd.storage["whowas"] whowasEntries = allWhowas[nick] whowasEntries = self.removeOldEntries(whowasEntries) if not whowasEntries: del 
allWhowas[nick] self.ircd.storage["whowas"] = allWhowas user.sendMessage(irc.ERR_WASNOSUCHNICK, data["param"], "There was no such nickname") return True allWhowas[nick] = whowasEntries # Save back to the list excluding the removed entries self.ircd.storage["whowas"] = allWhowas for entry in whowasEntries: entryNick = entry["nick"] user.sendMessage(irc.RPL_WHOWASUSER, entryNick, entry["ident"], entry["host"], "*", entry["gecos"]) if self.ircd.runActionUntilValue("userhasoperpermission", user, "whowas-host", users=[user]): user.sendMessage(irc.RPL_WHOWASIP, entryNick, "was connecting from {}@{} {}".format(entry["ident"], entry["realhost"], entry["ip"])) user.sendMessage(irc.RPL_WHOISSERVER, entryNick, entry["server"], str(entry["when"])) user.sendMessage(irc.RPL_ENDOFWHOWAS, nick, "End of WHOWAS") return True whowasCmd = WhowasCommand()<|fim▁end|>
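# Editor's sketch: removeOldEntries above enforces two limits -- a cap of
# whowas_max_entries (default 10) and an age cutoff of whowas_duration
# (default "1d"). The same rule distilled outside txircd, with the oldest
# entry first as in the stored list:
from datetime import datetime, timedelta

def prune_whowas(entries, max_count=10, max_age_seconds=86400):
    entries = list(entries)
    while len(entries) > max_count:
        entries.pop(0)
    cutoff = datetime.utcnow() - timedelta(seconds=max_age_seconds)
    while entries and entries[0]["when"] < cutoff:
        entries.pop(0)
    return entries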
<|file_name|>kociemba.js<|end_file_name|><|fim▁begin|>"use strict"; var utils = require('./utils.js'); var nChooseK = utils.nChooseK; var tic = utils.tic; var toc = utils.toc; var movesDef = { "U" : [[0,0,0,0, 0,0,0,0], [1,2,3,0, 4,5,6,7], [0,0,0,0, 0,0,0,0, 0,0,0,0], [1,2,3,0, 4,5,6,7, 8,9,10,11], [0,1,2,3,4,5]], "U2": [[0,0,0,0, 0,0,0,0], [2,3,0,1, 4,5,6,7], [0,0,0,0, 0,0,0,0, 0,0,0,0], [2,3,0,1, 4,5,6,7, 8,9,10,11], [0,1,2,3,4,5]], "U'": [[0,0,0,0, 0,0,0,0], [3,0,1,2, 4,5,6,7], [0,0,0,0, 0,0,0,0, 0,0,0,0], [3,0,1,2, 4,5,6,7, 8,9,10,11], [0,1,2,3,4,5]], "R" : [[0,1,2,0, 0,2,1,0], [0,5,1,3, 4,6,2,7], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,5,2,3, 4,9,1,7, 8,6,10,11], [0,1,2,3,4,5]], "R2": [[0,0,0,0, 0,0,0,0], [0,6,5,3, 4,2,1,7], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,9,2,3, 4,6,5,7, 8,1,10,11], [0,1,2,3,4,5]], "R'": [[0,1,2,0, 0,2,1,0], [0,2,6,3, 4,1,5,7], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,6,2,3, 4,1,9,7, 8,5,10,11], [0,1,2,3,4,5]], "F" : [[0,0,1,2, 0,0,2,1], [0,1,6,2, 4,5,7,3], [0,0,1,0, 0,0,1,1, 0,0,1,0], [0,1,6,3, 4,5,10,2, 8,9,7,11], [0,1,2,3,4,5]], "F2": [[0,0,0,0, 0,0,0,0], [0,1,7,6, 4,5,3,2], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,1,10,3, 4,5,7,6, 8,9,2,11], [0,1,2,3,4,5]], "F'": [[0,0,1,2, 0,0,2,1], [0,1,3,7, 4,5,2,6], [0,0,1,0, 0,0,1,1, 0,0,1,0], [0,1,7,3, 4,5,2,10, 8,9,6,11], [0,1,2,3,4,5]], "L" : [[2,0,0,1, 1,0,0,2], [3,1,2,7, 0,5,6,4], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,1,2,7, 3,5,6,11, 8,9,10,4], [0,1,2,3,4,5]], "L2": [[0,0,0,0, 0,0,0,0], [7,1,2,4, 3,5,6,0], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,1,2,11, 7,5,6,4, 8,9,10,3], [0,1,2,3,4,5]], "L'": [[2,0,0,1, 1,0,0,2], [4,1,2,0, 7,5,6,3], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,1,2,4, 11,5,6,3, 8,9,10,7], [0,1,2,3,4,5]], "D" : [[0,0,0,0, 0,0,0,0], [0,1,2,3, 7,4,5,6], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,1,2,3, 4,5,6,7, 11,8,9,10], [0,1,2,3,4,5]], "D2": [[0,0,0,0, 0,0,0,0], [0,1,2,3, 6,7,4,5], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,1,2,3, 4,5,6,7, 10,11,8,9], [0,1,2,3,4,5]], "D'": [[0,0,0,0, 0,0,0,0], [0,1,2,3, 5,6,7,4], [0,0,0,0, 0,0,0,0, 0,0,0,0], [0,1,2,3, 4,5,6,7, 9,10,11,8], [0,1,2,3,4,5]], "B" : [[1,2,0,0, 2,1,0,0], [4,0,2,3, 5,1,6,7], [1,0,0,0, 1,1,0,0, 1,0,0,0], [4,1,2,3, 8,0,6,7, 5,9,10,11], [0,1,2,3,4,5]], "B2": [[0,0,0,0, 0,0,0,0], [5,4,2,3, 1,0,6,7], [0,0,0,0, 0,0,0,0, 0,0,0,0], [8,1,2,3, 5,4,6,7, 0,9,10,11], [0,1,2,3,4,5]], "B'": [[1,2,0,0, 2,1,0,0], [1,5,2,3, 0,4,6,7], [1,0,0,0, 1,1,0,0, 1,0,0,0], [5,1,2,3, 0,8,6,7, 4,9,10,11], [0,1,2,3,4,5]], "M" : [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [1,0,1,0,0,0,0,0,1,0,1,0], [2,1,10,3,4,5,6,7,0,9,8,11],[1,5,2,0,4,3]], "M2": [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [0,0,0,0,0,0,0,0,0,0,0,0], [10,1,8,3,4,5,6,7,2,9,0,11],[5,3,2,1,4,0]], "M'": [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [1,0,1,0,0,0,0,0,1,0,1,0], [8,1,0,3,4,5,6,7,10,9,2,11],[3,0,2,5,4,1]], "E" : [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [0,0,0,0,1,1,1,1,0,0,0,0], [0,1,2,3,5,6,7,4,8,9,10,11], [0,4,1,2,3,5]], "E2": [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [0,0,0,0,0,0,0,0,0,0,0,0], [0,1,2,3,6,7,4,5,8,9,10,11], [0,3,4,1,2,5]], "E'": [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [0,0,0,0,1,1,1,1,0,0,0,0], [0,1,2,3,7,4,5,6,8,9,10,11], [0,2,3,4,1,5]], "S" : [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [0,1,0,1,0,0,0,0,0,1,0,1], [0,9,2,1,4,5,6,7,8,11,10,3], [2,1,5,3,0,4]], "S2": [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [0,0,0,0,0,0,0,0,0,0,0,0], [0,11,2,9,4,5,6,7,8,3,10,1], [5,1,4,3,2,0]], "S'": [[0,0,0,0,0,0,0,0], [0,1,2,3,4,5,6,7], [0,1,0,1,0,0,0,0,0,1,0,1], [0,3,2,11,4,5,6,7,8,1,10,9], [4,1,0,3,5,2]], "u" : [[0,0,0,0,0,0,0,0], [1,2,3,0,4,5,6,7], [0,0,0,0,1,1,1,1,0,0,0,0], 
[1,2,3,0,5,6,7,4,8,9,10,11], [0,4,1,2,3,5]], "u2": [[0,0,0,0,0,0,0,0], [2,3,0,1,4,5,6,7], [0,0,0,0,0,0,0,0,0,0,0,0], [2,3,0,1,6,7,4,5,8,9,10,11], [0,3,4,1,2,5]], "u'": [[0,0,0,0,0,0,0,0], [3,0,1,2,4,5,6,7], [0,0,0,0,1,1,1,1,0,0,0,0], [3,0,1,2,7,4,5,6,8,9,10,11], [0,2,3,4,1,5]], "r" : [[0,1,2,0,0,2,1,0], [0,5,1,3,4,6,2,7], [1,0,1,0,0,0,0,0,1,0,1,0], [8,5,0,3,4,9,1,7,10,6,2,11], [3,0,2,5,4,1]], "r2": [[0,0,0,0,0,0,0,0], [0,6,5,3,4,2,1,7], [0,0,0,0,0,0,0,0,0,0,0,0], [10,9,8,3,4,6,5,7,2,1,0,11], [5,3,2,1,4,0]], "r'": [[0,1,2,0,0,2,1,0], [0,2,6,3,4,1,5,7], [1,0,1,0,0,0,0,0,1,0,1,0], [2,6,10,3,4,1,9,7,0,5,8,11], [1,5,2,0,4,3]], "f" : [[0,0,1,2,0,0,2,1], [0,1,6,2,4,5,7,3], [0,1,1,1,0,0,1,1,0,1,1,1], [0,9,6,1,4,5,10,2,8,11,7,3], [2,1,5,3,0,4]], "f2": [[0,0,0,0,0,0,0,0], [0,1,7,6,4,5,3,2], [0,0,0,0,0,0,0,0,0,0,0,0], [0,11,10,9,4,5,7,6,8,3,2,1], [5,1,4,3,2,0]], "f'": [[0,0,1,2,0,0,2,1], [0,1,3,7,4,5,2,6], [0,1,1,1,0,0,1,1,0,1,1,1], [0,3,7,11,4,5,2,10,8,1,6,9], [4,1,0,3,5,2]], "l" : [[2,0,0,1,1,0,0,2], [3,1,2,7,0,5,6,4], [1,0,1,0,0,0,0,0,1,0,1,0], [2,1,10,7,3,5,6,11,0,9,8,4], [1,5,2,0,4,3]], "l2": [[0,0,0,0,0,0,0,0], [7,1,2,4,3,5,6,0], [0,0,0,0,0,0,0,0,0,0,0,0], [10,1,8,11,7,5,6,4,2,9,0,3], [5,3,2,1,4,0]], "l'": [[2,0,0,1,1,0,0,2], [4,1,2,0,7,5,6,3], [1,0,1,0,0,0,0,0,1,0,1,0], [8,1,0,4,11,5,6,3,10,9,2,7], [3,0,2,5,4,1]], "b" : [[1,2,0,0,2,1,0,0], [4,0,2,3,5,1,6,7], [1,1,0,1,1,1,0,0,1,1,0,1], [4,3,2,11,8,0,6,7,5,1,10,9], [4,1,0,3,5,2]], "b2": [[0,0,0,0,0,0,0,0], [5,4,2,3,1,0,6,7], [0,0,0,0,0,0,0,0,0,0,0,0], [8,11,2,9,5,4,6,7,0,3,10,1], [5,1,4,3,2,0]], "b'": [[1,2,0,0,2,1,0,0], [1,5,2,3,0,4,6,7], [1,1,0,1,1,1,0,0,1,1,0,1], [5,9,2,1,0,8,6,7,4,11,10,3], [2,1,5,3,0,4]], "d" : [[0,0,0,0,0,0,0,0], [0,1,2,3,7,4,5,6], [0,0,0,0,1,1,1,1,0,0,0,0], [0,1,2,3,7,4,5,6,11,8,9,10], [0,2,3,4,1,5]], "d2": [[0,0,0,0,0,0,0,0], [0,1,2,3,6,7,4,5], [0,0,0,0,0,0,0,0,0,0,0,0], [0,1,2,3,6,7,4,5,10,11,8,9], [0,3,4,1,2,5]], "d'": [[0,0,0,0,0,0,0,0], [0,1,2,3,5,6,7,4], [0,0,0,0,1,1,1,1,0,0,0,0], [0,1,2,3,5,6,7,4,9,10,11,8], [0,4,1,2,3,5]], "x" : [[2,1,2,1, 1,2,1,2], [4,5,1,0, 7,6,2,3], [1,0,1,0,0,0,0,0,1,0,1,0], [8,5,0,4,11,9,1,3,10,6,2,7], [3,0,2,5,4,1]], "x2": [[0,0,0,0, 0,0,0,0], [7,6,5,4, 3,2,1,0], [0,0,0,0,0,0,0,0,0,0,0,0], [10,9,8,11,7,6,5,4,2,1,0,3], [5,3,2,1,4,0]], "x'": [[2,1,2,1, 1,2,1,2], [3,2,6,7, 0,1,5,4], [1,0,1,0,0,0,0,0,1,0,1,0], [2,6,10,7,3,1,9,11,0,5,8,4], [1,5,2,0,4,3]], "y" : [[0,0,0,0,0,0,0,0], [1,2,3,0,5,6,7,4], [0,0,0,0,1,1,1,1,0,0,0,0], [1,2,3,0,5,6,7,4,9,10,11,8], [0,4,1,2,3,5]], "y2": [[0,0,0,0,0,0,0,0], [2,3,0,1,6,7,4,5], [0,0,0,0,0,0,0,0,0,0,0,0], [2,3,0,1,6,7,4,5,10,11,8,9], [0,3,4,1,2,5]], "y'": [[0,0,0,0,0,0,0,0], [3,0,1,2,7,4,5,6], [0,0,0,0,1,1,1,1,0,0,0,0], [3,0,1,2,7,4,5,6,11,8,9,10], [0,2,3,4,1,5]], "z" : [[1,2,1,2,2,1,2,1], [1,5,6,2,0,4,7,3], [1,1,1,1,1,1,1,1,1,1,1,1], [5,9,6,1,0,8,10,2,4,11,7,3], [2,1,5,3,0,4]], "z2": [[0,0,0,0,0,0,0,0], [5,4,7,6,1,0,3,2], [0,0,0,0,0,0,0,0,0,0,0,0], [8,11,10,9,5,4,7,6,0,3,2,1], [5,1,4,3,2,0]], "z'": [[1,2,1,2,2,1,2,1], [4,0,3,7,5,1,2,6], [1,1,1,1,1,1,1,1,1,1,1,1], [4,3,7,11,8,0,2,10,5,1,6,9], [4,1,0,3,5,2]], "RLmirror": [[0,0,0,0,0,0,0,0], [1,0,3,2,5,4,7,6], [0,0,0,0,0,0,0,0,0,0,0,0], [0,3,2,1,5,4,7,6,8,11,10,9], [0,3,4,1,2,5]] // Not a real move, but useful for symmetry reductions }; var moveAxes = { "U" : 0, "U2": 0, "U'": 0, "R" : 1, "R2": 1, "R'": 1, "F" : 2, "F2": 2, "F'": 2, "L" : 1, "L2": 1, "L'": 1, "D" : 0, "D2": 0, "D'": 0, "B" : 2, "B2": 2, "B'": 2, "M" : 1, "M2": 1, "M'": 1, "E" : 0, "E2": 0, "E'": 0, "S" : 2, "S2": 2, "S'": 2, "u" : 0, "u2": 0, 
"u'": 0, "r" : 1, "r2": 1, "r'": 1, "f" : 2, "f2": 2, "f'": 2, "l" : 1, "l2": 1, "l'": 1, "b" : 2, "b2": 2, "b'": 2, "d" : 0, "d2": 0, "d'": 0, "x" : 1, "x2": 1, "x'": 1, "y" : 0, "y2": 0, "y'": 0, "z" : 2, "z2": 2, "z'": 2 }; /* Any naive search will search sequences containing consecutive moves on the same axis. If for example, one such sequence contains the sequence R L, then there is no point in performing a search with L R, as the resulting state will be identical. The following defines a hierarchy which specifies an arbitrarily chosen order to avoid searching duplicate move sequences. */ var trivialFollowSet = { "U" : 0, "U2": 0, "U'": 0, "R" : 1, "R2": 1, "R'": 1, "F" : 2, "F2": 2, "F'": 2, "L" : 3, "L2": 3, "L'": 3, "D" : 4, "D2": 4, "D'": 4, "B" : 5, "B2": 5, "B'": 5, "M" : 6, "M2": 6, "M'": 6, "E" : 7, "E2": 7, "E'": 7, "S" : 8, "S2": 8, "S'": 8, "u" : 9, "u2": 9, "u'": 9, "r" : 10, "r2": 10, "r'": 10, "f" : 11, "f2": 11, "f'": 11, "l" : 12, "l2": 12, "l'": 12, "b" : 13, "b2": 13, "b'": 13, "d" : 14, "d2": 14, "d'": 14, "x" : 15, "x2": 15, "x'": 15, "y" : 16, "y2": 16, "y'": 16, "z" : 17, "z2": 17, "z'": 17<|fim▁hole|>}; var isTrivialMove = function (move, moveset) { // A move is trivial if it is preceded by the same type or a more dominant move // on the same axis (see trivialFollowSet) for (let i=moveset.length-1; i>=0; i--) { if (moveAxes[moveset[i]] === moveAxes[move]) { if (trivialFollowSet[moveset[i]] >= trivialFollowSet[move]) { return true; // Trivial turn } } else { return false; // Preceded by turn on a different axis } } return false; // No trivial turns }; var applyMoveCP = function (move, cpstate) { let moveCP = movesDef[move][1]; let newState = cpstate.slice(); for (let i=0; i < moveCP.length; i++) { newState[moveCP[i]] = cpstate[i]; } return newState; }; var applyMoveEP = function (move, epstate) { let moveEP = movesDef[move][3]; let newState = epstate.slice(); for (let i=0; i < moveEP.length; i++) { newState[moveEP[i]] = epstate[i]; } return newState; }; var applyMoveCentr = function (move, centrstate) { let moveCentr = movesDef[move][4]; let newState = centrstate.slice(); for (let i=0; i < moveCentr.length; i++) { newState[moveCentr[i]] = centrstate[i]; } return newState; }; var applyMoveCO = function (move, costate) { let moveCO = movesDef[move][0]; let newState = applyMoveCP(move, costate); for (let i=0; i < moveCO.length; i++) { newState[i] = (newState[i] + moveCO[i]) % 3; } return newState; }; var applyMoveEO = function (move, eostate) { let moveEO = movesDef[move][2]; let newState = applyMoveEP(move, eostate); for (let i=0; i < moveEO.length; i++) { newState[i] = (newState[i] + moveEO[i]) % 2; } return newState; }; //======================================================================================= /* Phase 1 simple coordinates Three simple coordinates are used : CornerTwist, EdgeFlip and a ESlice coord These specify the corner orientation, the edge orientation and the location of the four edges that belong on the E slice. These coordinates may be reduced using symmetry. 
Adapted from: http://kociemba.org/math/twophase.htm */ var getCoordCornTwist = function (state) { // Corner orientation coordinate // Integer from 0 to 2186 let coord = 0; for (let i=0; i<7; i++) { // corner orientation of all but last corner coord = coord*3 + state[i]; } return coord; }; var getCoordEdgeFlip = function (state) { // Edge orientation coordinate // Integer from 0 to 2047 let coord = 0; for (let i=0; i<11; i++) { // edge orientation of all but last corner coord = coord*2 + state[i]; } return coord; }; var getCoordESlice = function (state) { // Permutation of the four E slice // Integer from 0 to 494 let coord = 0; let occupied = 0 // Loop through in a custom order. // The four correct locations come last so that the coordinate of the solved state is 0 let order = [0,1,2,3,8,9,10,11,4,5,6,7]; for (let idx=0; idx<order.length; idx++) { let i = order[idx]; if (state[i]>=5 && state[i]<=8) { occupied++; } else if (occupied) { coord += nChooseK(idx, occupied-1); } } return coord; }; //--------------------------------------------------------------------------------------- /* Inverses of the above four coordinates */ var invertCoordCornTwist =function (coord) { // Inverse function for corner orientation coordinate let state = new Array(8).fill(0); let temp = coord; let last = 15; // Smallest multiple of 3 such that (last >= 2*7) for (let i=6; i>=0; i--) { state[i] = temp % 3; last -= state[i]; temp = Math.floor(temp / 3); } // Last corner is set such that the sum mod 3 is zero. state[7] = last % 3; return state } var invertCoordEdgeFlip =function (coord) { // Inverse function for edge orientation let state = new Array(12).fill(0); let temp = coord; let last = 12; // Smallest multiple of 2 such that (last >= 1*11) for (let i=10; i>=0; i--) { state[i] = temp % 2; last -= state[i]; temp = Math.floor(temp / 2); } // Last edge is set such that the sum mod 2 is zero. state[11] = last % 2; return state } var invertCoordESlice = function (coord) { // Inverse function for permutation of the four E slice edges let state = new Array(12).fill(0); let tempCoord = coord; let nEdgesLeft = 4; let nextEdge = 8; // Count down since loop goes backward: 8,7,6,5 let order = [0,1,2,3,8,9,10,11,4,5,6,7]; // E slice edges come last for (let idx=0; idx<order.length; idx++) { if (tempCoord >= nChooseK(11-idx, nEdgesLeft-1)) { tempCoord -= nChooseK(11-idx, nEdgesLeft-1); } else { state[order[11-idx]] = (nextEdge--); nEdgesLeft--; } if (nEdgesLeft === 0) { break; } } return state; }; //======================================================================================= /* Phase 1 Move Tables Functions for constructing move tables for each coordinate. 
*/ // Constants const nCornTwist = 2187; const nEdgeFlip = 2048; const nESlice = 495; //--------------------------------------------------------------------------------------- var buildCornerTwistMoveTable = function (move) { let moveTable = new Array(nCornTwist).fill(0); for (let coord=0; coord<nCornTwist; coord++) { let state = applyMoveCO(move, invertCoordCornTwist(coord)); moveTable[coord] = getCoordCornTwist(state); } return moveTable; }; var buildEdgeFlipMoveTable = function (move) { let moveTable = new Array(nEdgeFlip).fill(0); for (let coord=0; coord<nEdgeFlip; coord++) { let state = applyMoveEO(move, invertCoordEdgeFlip(coord)); moveTable[coord] = getCoordEdgeFlip(state); } return moveTable; }; var buildESliceMoveTable = function (move) { let moveTable = new Array(nESlice).fill(0); for (let coord=0; coord<nESlice; coord++) { let state = applyMoveEP(move, invertCoordESlice(coord)); moveTable[coord] = getCoordESlice(state); } return moveTable; }; //--------------------------------------------------------------------------------------- var buildTableForAllMoves = function (allowedMoves, tableFunction) { // Builds a move table for each of the move. allowedMoves is a list of moves // and tableFunction is a function which constructs the move table. let moveTables = {}; for (let m in allowedMoves) { let move = allowedMoves[m]; moveTables[move] = tableFunction(move); } return moveTables; }; var phase1AllowedMoves = ["R","R'","R2","U","U'","U2","F","F'","F2","D","D'","D2","L","L'","L2","B","B'","B2"]; // Add some extra moves. These take up a little extra space in the move tables, // but make it easier and faster to calculate sym coordinates var extendedAllowedMoves = phase1AllowedMoves.concat(['y', 'z2', 'RLmirror']); // Build all three move tables for phase 1 var moveTableCornerTwist = buildTableForAllMoves(extendedAllowedMoves, buildCornerTwistMoveTable); var moveTableEdgeFlip = buildTableForAllMoves(extendedAllowedMoves, buildEdgeFlipMoveTable); var moveTableESlice = buildTableForAllMoves(extendedAllowedMoves, buildESliceMoveTable); //======================================================================================= /* Symmetry reductions. Coordinates can be reduced using symmetry. The three symmetry transformations used are: y, z2, and RLmirror. */ var buildSymmetryMoveTable = function (allowedMoves, nCoordSize, moveTable) { let nextSymCoord = 0; let newMoveTable = 0; for (let i=0; i<nCoordSize; i++) { } } var getPhase1SymCoordMapTables = function (moveTables) { // Generate mapping tables to take raw EO and EsliceEdge coordinates to // a combined coordinate that is reduced by symmetry. 
let nextSymCoord = 0;
    let symCoordMap = [];//new Array(nPhase1Edges).fill(0);
    let symCoordInvMap = new Array(nEdgeFlip * nESlice).fill(0);

    let l = new Array(17).fill(0);

    for (let rawCoord=0; rawCoord < (nEdgeFlip * nESlice); rawCoord++) {
        let eoCoord = (rawCoord % nEdgeFlip);
        let eepCoord = Math.floor(rawCoord / nEdgeFlip);
        // console.log(' Start', eoCoord, eepCoord)
        let rawStates = [];

        // For each type of symmetry, calculate the state reached
        // and populate the mapping tables
        let alreadyVisited = false;
        let newRawCoord = 0;
        // console.log('start', rawCoord)
        for (let i=0; i<2; i++) {
            if (alreadyVisited) break;
            eoCoord = moveTables[1]['RLmirror'][eoCoord];
            eepCoord = moveTables[2]['RLmirror'][eepCoord];
            for (let j=0; j<2; j++) {
                if (alreadyVisited) break;
                eoCoord = moveTables[1]['z2'][eoCoord];
                eepCoord = moveTables[2]['z2'][eepCoord];
                for (let k=0; k<4; k++) {
                    if (alreadyVisited) break;
                    eoCoord = moveTables[1]['y'][eoCoord];
                    eepCoord = moveTables[2]['y'][eepCoord];

                    newRawCoord = (eepCoord * nEdgeFlip) + eoCoord;
                    // console.log('', i,j,k,'--',rawCoord, '=>', newRawCoord, ' \tEO',eoCoord,'EP',eepCoord)
                    if (newRawCoord < rawCoord) {
                        alreadyVisited = true;
                        break;
                    }
                    rawStates.push(newRawCoord);
                    // console.log(rawCoord, newRawCoord);
                }
            }
        }
        // console.log('end', newRawCoord)
        if (alreadyVisited) {
            continue;
        }

        // let nUniqueSymmStates = 1;
        //
        // for (let j=1; j<rawStates.length; j++) {
        //     if (rawStates[0] === rawStates[j]) {
        //         nUniqueSymmStates++;
        //     }
        // }
        // l[16/nUniqueSymmStates]++;

        for (let i=0; i<rawStates.length; i++) {
            symCoordInvMap[rawStates[i]] = nextSymCoord;
        }
        symCoordMap[nextSymCoord] = rawCoord;
        nextSymCoord++;
    }

    // console.log(nEdgeFlip * nESlice);
    // console.log(nextSymCoord);
    // console.log(64430);
    // console.log(l);
    // console.log(symCoordMap.length, symCoordInvMap.length)
    return [symCoordMap, symCoordInvMap];
}<|fim▁end|>
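// Editor's worked example for the phase-1 coordinates defined above: the
// solved cube encodes to 0, and one R turn twists corners 1,2,5,6 (see
// movesDef["R"][0]), so the base-3 encoding of the first seven orientation
// digits gives
//   applyMoveCO("R", [0,0,0,0,0,0,0,0])  -> [0,1,2,0,0,2,1,0]
//   getCoordCornTwist([0,1,2,0,0,2,1,0]) -> 1*243 + 2*81 + 2*3 + 1 = 412
// and the encoding round-trips: getCoordCornTwist(invertCoordCornTwist(c))
// === c for every c in [0, 2186], which is what the move tables rely on.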
<|file_name|>fxch_ST0_ST2.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Details (including contact information) can be found at: jpc.sourceforge.net or the developer website sourceforge.net/projects/jpc/ End of licence header */ package com.github.smeny.jpc.emulator.execution.opcodes.pm; import com.github.smeny.jpc.emulator.execution.*; import com.github.smeny.jpc.emulator.execution.decoder.*; import com.github.smeny.jpc.emulator.processor.*; import com.github.smeny.jpc.emulator.processor.fpu64.*; import static com.github.smeny.jpc.emulator.processor.Processor.*; public class fxch_ST0_ST2 extends Executable { public fxch_ST0_ST2(int blockStart, int eip, int prefices, PeekableInputStream input) { super(blockStart, eip); int modrm = input.readU8(); } public Branch execute(Processor cpu) { double tmp = cpu.fpu.ST(0); cpu.fpu.setST(0, cpu.fpu.ST(2)); cpu.fpu.setST(2, tmp); return Branch.None; } public boolean isBranch() { return false; } public String toString() { return this.getClass().getName(); } }<|fim▁end|>
JPC: An x86 PC Hardware Emulator for a pure Java Virtual Machine Copyright (C) 2012-2013 Ian Preston
<|file_name|>storage.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0. define_error_codes!( "KV:Storage:", TIMEOUT => ("Timeout", "", ""), EMPTY_REQUEST => ("EmptyRequest", "", ""), CLOSED => ("Closed", "", ""), IO => ("Io", "", ""), SCHED_TOO_BUSY => ("SchedTooBusy", "", ""), GC_WORKER_TOO_BUSY => ("GcWorkerTooBusy", "", ""), KEY_TOO_LARGE => ("KeyTooLarge", "", ""), INVALID_CF => ("InvalidCF", "", ""), TTL_NOT_ENABLED => ("TTLNotEnabled", "", ""), PROTOBUF => ("Protobuf", "", ""), INVALID_TXN_TSO => ("INVALIDTXNTSO", "", ""), INVALID_REQ_RANGE => ("InvalidReqRange", "", ""), BAD_FORMAT_LOCK => ("BadFormatLock", "", ""), BAD_FORMAT_WRITE => ("BadFormatWrite", "",""), KEY_IS_LOCKED => ("KeyIsLocked", "", ""), MAX_TIMESTAMP_NOT_SYNCED => ("MaxTimestampNotSynced", "", ""), COMMITTED => ("Committed", "", ""), PESSIMISTIC_LOCK_ROLLED_BACK => ("PessimisticLockRolledBack", "", ""), TXN_LOCK_NOT_FOUND => ("TxnLockNotFound", "", ""), TXN_NOT_FOUND => ("TxnNotFound", "", ""), LOCK_TYPE_NOT_MATCH => ("LockTypeNotMatch", "", ""), WRITE_CONFLICT => ("WriteConflict", "", ""), DEADLOCK => ("Deadlock", "", ""), ALREADY_EXIST => ("AlreadyExist", "",""), DEFAULT_NOT_FOUND => ("DefaultNotFound", "", ""), COMMIT_TS_EXPIRED => ("CommitTsExpired", "", ""), KEY_VERSION => ("KeyVersion", "",""), PESSIMISTIC_LOCK_NOT_FOUND => ("PessimisticLockNotFound", "", ""), COMMIT_TS_TOO_LARGE => ("CommitTsTooLarge", "", ""), UNKNOWN => ("Unknown", "", "") );<|fim▁end|>
<|file_name|>sortController.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for ag-grid v4.0.5 // Project: http://www.ag-grid.com/ // Definitions by: Niall Crosby <https://github.com/ceolter/> // Definitions: https://github.com/borisyankov/DefinitelyTyped import { Column } from "./entities/column"; export declare class SortController { private static DEFAULT_SORTING_ORDER; private gridOptionsWrapper; private columnController; private eventService; progressSort(column: Column, multiSort: boolean): void;<|fim▁hole|> getSortModel(): { colId: string; sort: string; }[]; setSortModel(sortModel: any): void; getColumnsWithSortingOrdered(): Column[]; getSortForRowController(): any[]; }<|fim▁end|>
private dispatchSortChangedEvents(); private clearSortBarThisColumn(columnToSkip); private getNextSortDirection(column);
<|file_name|>blender_sim.py<|end_file_name|><|fim▁begin|>"""Simulation of controlled dumbbell around Itokawa with simulated imagery using Blender This will generate the imagery of Itokawa from a spacecraft following a vertical descent onto the surface. 4 August 2017 - Shankar Kulumani """ from __future__ import absolute_import, division, print_function, unicode_literals from scipy import integrate import numpy as np import pdb import h5py, cv2 import visualization.plotting as plotting from visualization import blender_camera from dynamics import asteroid, dumbbell, controller, eoms from kinematics import attitude from visualization import blender import inertial_driver as idriver import relative_driver as rdriver import datetime def eoms_controlled_blender(t, state, dum, ast): """Inertial dumbbell equations of motion about an asteroid This method must be used with the scipy.integrate.ode class instead of the more convienent scipy.integrate.odeint. In addition, we can control the dumbbell given full state feedback. Blender is used to generate imagery during the simulation. Inputs: t - Current simulation time step state - (18,) array which defines the state of the vehicle pos - (3,) position of the dumbbell with respect to the asteroid center of mass and expressed in the inertial frame vel - (3,) velocity of the dumbbell with respect to the asteroid center of mass and expressed in the inertial frame R - (9,) attitude of the dumbbell with defines the transformation of a vector in the dumbbell frame to the inertial frame ang_vel - (3,) angular velocity of the dumbbell with respect to the inertial frame and represented in the dumbbell frame ast - Asteroid class object holding the asteroid gravitational model and other useful parameters """ # unpack the state pos = state[0:3] # location of the center of mass in the inertial frame vel = state[3:6] # vel of com in inertial frame R = np.reshape(state[6:15],(3,3)) # sc body frame to inertial frame ang_vel = state[15:18] # angular velocity of sc wrt inertial frame defined in body frame Ra = attitude.rot3(ast.omega*t, 'c') # asteroid body frame to inertial frame # unpack parameters for the dumbbell J = dum.J rho1 = dum.zeta1 rho2 = dum.zeta2 # position of each mass in the asteroid frame z1 = Ra.T.dot(pos + R.dot(rho1)) z2 = Ra.T.dot(pos + R.dot(rho2)) z = Ra.T.dot(pos) # position of COM in asteroid frame # compute the potential at this state (U1, U1_grad, U1_grad_mat, U1laplace) = ast.polyhedron_potential(z1) (U2, U2_grad, U2_grad_mat, U2laplace) = ast.polyhedron_potential(z2) F1 = dum.m1*Ra.dot(U1_grad) F2 = dum.m2*Ra.dot(U2_grad) M1 = dum.m1 * attitude.hat_map(rho1).dot(R.T.dot(Ra).dot(U1_grad)) M2 = dum.m2 * attitude.hat_map(rho2).dot(R.T.dot(Ra).dot(U2_grad)) # generate image at this current state only at a specifc time # blender.driver(pos, R, ast.omega * t, [5, 0, 1], 'test' + str.zfill(str(t), 4)) # use the imagery to figure out motion and pass to the controller instead # of the true state # calculate the desired attitude and translational trajectory des_att_tuple = controller.body_fixed_pointing_attitude(t, state) des_tran_tuple = controller.traverse_then_land_vertically(t, ast, final_pos=[0.550, 0, 0], initial_pos=[2.550, 0, 0],<|fim▁hole|> descent_tf=3600) # input trajectory and compute the control inputs # compute the control input u_m = controller.attitude_controller(t, state, M1+M2, dum, ast, des_att_tuple) u_f = controller.translation_controller(t, state, F1+F2, dum, ast, des_tran_tuple) pos_dot = vel vel_dot = 1/(dum.m1+dum.m2) *(F1 + 
F2 + u_f) R_dot = R.dot(attitude.hat_map(ang_vel)).reshape(9) ang_vel_dot = np.linalg.inv(J).dot(-np.cross(ang_vel,J.dot(ang_vel)) + M1 + M2 + u_m) statedot = np.hstack((pos_dot, vel_dot, R_dot, ang_vel_dot)) return statedot def eoms_controlled_blender_traverse_then_land(t, state, dum, ast): """Inertial dumbbell equations of motion about an asteroid This method must be used with the scipy.integrate.ode class instead of the more convienent scipy.integrate.odeint. In addition, we can control the dumbbell given full state feedback. Blender is used to generate imagery during the simulation. The spacecraft will move horizontally for the first 3600 sec to a positon [2.550, 0, 0] in the asteroid (and inertial) frame, then descend vertically in the asteroid frame. Inputs: t - Current simulation time step state - (18,) array which defines the state of the vehicle pos - (3,) position of the dumbbell with respect to the asteroid center of mass and expressed in the inertial frame vel - (3,) velocity of the dumbbell with respect to the asteroid center of mass and expressed in the inertial frame R - (9,) attitude of the dumbbell with defines the transformation of a vector in the dumbbell frame to the inertial frame ang_vel - (3,) angular velocity of the dumbbell with respect to the inertial frame and represented in the dumbbell frame ast - Asteroid class object holding the asteroid gravitational model and other useful parameters """ # unpack the state pos = state[0:3] # location of the center of mass in the inertial frame vel = state[3:6] # vel of com in inertial frame R = np.reshape(state[6:15],(3,3)) # sc body frame to inertial frame ang_vel = state[15:18] # angular velocity of sc wrt inertial frame defined in body frame Ra = attitude.rot3(ast.omega*(t - 3600), 'c') # asteroid body frame to inertial frame # unpack parameters for the dumbbell J = dum.J rho1 = dum.zeta1 rho2 = dum.zeta2 # position of each mass in the asteroid frame z1 = Ra.T.dot(pos + R.dot(rho1)) z2 = Ra.T.dot(pos + R.dot(rho2)) z = Ra.T.dot(pos) # position of COM in asteroid frame # compute the potential at this state (U1, U1_grad, U1_grad_mat, U1laplace) = ast.polyhedron_potential(z1) (U2, U2_grad, U2_grad_mat, U2laplace) = ast.polyhedron_potential(z2) F1 = dum.m1*Ra.dot(U1_grad) F2 = dum.m2*Ra.dot(U2_grad) M1 = dum.m1 * attitude.hat_map(rho1).dot(R.T.dot(Ra).dot(U1_grad)) M2 = dum.m2 * attitude.hat_map(rho2).dot(R.T.dot(Ra).dot(U2_grad)) # generate image at this current state only at a specifc time # blender.driver(pos, R, ast.omega * t, [5, 0, 1], 'test' + str.zfill(str(t), 4)) # use the imagery to figure out motion and pass to the controller instead # of the true state # compute the control input u_m = controller.attitude_traverse_then_land_controller(t, state, M1+M2, dum, ast) u_f = controller.translation_traverse_then_land_controller(t, state, F1+F2, dum, ast) pos_dot = vel vel_dot = 1/(dum.m1+dum.m2) *(F1 + F2 + u_f) R_dot = R.dot(attitude.hat_map(ang_vel)).reshape(9) ang_vel_dot = np.linalg.inv(J).dot(-np.cross(ang_vel,J.dot(ang_vel)) + M1 + M2 + u_m) statedot = np.hstack((pos_dot, vel_dot, R_dot, ang_vel_dot)) return statedot def blender_traverse_then_land_sim(): # simulation parameters output_path = './visualization/blender' asteroid_name = 'itokawa_low' # create a HDF5 dataset hdf5_path = './data/itokawa_landing/{}_controlled_vertical_landing.hdf5'.format( datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")) dataset_name = 'landing' render = 'BLENDER' image_modulus = 400 RelTol = 1e-6 AbsTol = 1e-6 ast_name = 'itokawa' 
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 7200
    num_steps = 7200

    periodic_pos = np.array([1.495746722510590, 0.000001002669660, 0.006129720493607])
    periodic_vel = np.array([0.000000302161724, -0.000899607989820, -0.000000013286327])

    ast = asteroid.Asteroid(ast_name, num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)

    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)

    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)

    # set initial state for inertial EOMs
    # initial_pos = np.array([2.550, 0, 0])  # km for center of mass in body frame
    initial_pos = np.array([0, -2.550, 0])
    initial_vel = periodic_vel + attitude.hat_map(ast.omega * np.array([0, 0, 1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi / 2).reshape(9)  # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))

    # instantiate ode object
    # system = integrate.ode(eoms_controlled_blender)
    system = integrate.ode(eoms_controlled_blender_traverse_then_land)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast)

    i_state = np.zeros((num_steps + 1, 18))
    time = np.zeros(num_steps + 1)
    i_state[0, :] = initial_state

    # open in append mode so the image datasets can be created
    with h5py.File(hdf5_path, 'a') as image_data:
        # create a dataset (shapes must be integers, hence the floor division)
        images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
        RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
        R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))

        ii = 1
        while system.successful() and system.t < tf:
            # integrate the system and save state to an array
            time[ii] = (system.t + dt)
            i_state[ii, :] = (system.integrate(system.t + dt))

            # generate the view of the asteroid at this state
            if int(time[ii]) % image_modulus == 0:
                img, RT, R = blender.gen_image(i_state[ii, 0:3], i_state[ii, 6:15].reshape((3, 3)),
                                               ast.omega * (time[ii] - 3600),
                                               camera_obj, camera, lamp_obj, lamp,
                                               itokawa_obj, scene,
                                               [5, 0, 1], 'test')

                images[:, :, :, ii // image_modulus - 1] = img
                RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
                R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)

            # do some image processing and visual odometry
            print(system.t)
            ii += 1

        image_data.create_dataset('K', data=K)
        image_data.create_dataset('i_state', data=i_state)
        image_data.create_dataset('time', data=time)


def blender_vertical_landing_sim():
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_low'
    # create a HDF5 dataset
    hdf5_path = './data/itokawa_landing/{}_controlled_vertical_landing.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 200
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 3600
    num_steps = 3600

    periodic_pos = np.array([1.495746722510590, 0.000001002669660, 0.006129720493607])
    periodic_vel = np.array([0.000000302161724, -0.000899607989820, -0.000000013286327])

    ast = asteroid.Asteroid(ast_name, num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)

    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)

    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)

    # set initial state for inertial EOMs
    initial_pos = np.array([2.550, 0, 0])  # km for center of mass in body frame
    initial_vel = periodic_vel + attitude.hat_map(ast.omega * np.array([0, 0, 1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi).reshape(9)  # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))

    # instantiate ode object
    system = integrate.ode(eoms_controlled_blender)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast)

    i_state = np.zeros((num_steps + 1, 18))
    time = np.zeros(num_steps + 1)
    i_state[0, :] = initial_state

    with h5py.File(hdf5_path, 'a') as image_data:
        # create a dataset
        images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
        RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
        R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))

        ii = 1
        while system.successful() and system.t < tf:
            # integrate the system and save state to an array
            time[ii] = (system.t + dt)
            i_state[ii, :] = (system.integrate(system.t + dt))

            # generate the view of the asteroid at this state
            if int(time[ii]) % image_modulus == 0:
                img, RT, R = blender.gen_image(i_state[ii, 0:3], i_state[ii, 6:15].reshape((3, 3)),
                                               ast.omega * time[ii],
                                               camera_obj, camera, lamp_obj, lamp,
                                               itokawa_obj, scene,
                                               [5, 0, 1], 'test')

                images[:, :, :, ii // image_modulus - 1] = img
                RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
                R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)

            # do some image processing and visual odometry
            ii += 1

        image_data.create_dataset('K', data=K)
        image_data.create_dataset('i_state', data=i_state)
        image_data.create_dataset('time', data=time)


def blender_inertial_circumnavigate(gen_images=False):
    """Move around the asteroid in the inertial frame, but assume no rotation
    of the asteroid
    """
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_high'
    # create a HDF5 dataset
    hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 1
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 3600 * 4
    num_steps = 3600 * 4
    loops = 4

    periodic_pos = np.array([1.495746722510590, 0.000001002669660, 0.006129720493607])
    periodic_vel = np.array([0.000000302161724, -0.000899607989820, -0.000000013286327])

    ast = asteroid.Asteroid(ast_name, num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)

    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)

    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)

    # set initial state for inertial EOMs
    initial_pos = np.array([3, 0, 0])  # km for center of mass in body frame
    initial_vel = periodic_vel + attitude.hat_map(ast.omega * np.array([0, 0, 1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi).reshape(9)  # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))

    # instantiate ode object
    system = integrate.ode(eoms.eoms_controlled_inertial_circumnavigate)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast, tf, loops)

    i_state = np.zeros((num_steps + 1, 18))
    time = np.zeros(num_steps + 1)
    i_state[0, :] = initial_state

    with h5py.File(hdf5_path, 'a') as image_data:
        # create a dataset
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))

        ii = 1
        while system.successful() and system.t < tf:
            # integrate the system and save state to an array
            time[ii] = (system.t + dt)
            i_state[ii, :] = (system.integrate(system.t + dt))

            # generate the view of the asteroid at this state
            if int(time[ii]) % image_modulus == 0 and gen_images:
                # img, RT, R = blender.gen_image(i_state[ii, 0:3], i_state[ii, 6:15].reshape((3, 3)),
                #                                ast.omega * time[ii],
                #                                camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene,
                #                                [5, 0, 1], 'test')
                img, RT, R = blender.gen_image_fixed_ast(i_state[ii, 0:3], i_state[ii, 6:15].reshape((3, 3)),
                                                         camera_obj, camera, lamp_obj, lamp,
                                                         itokawa_obj, scene,
                                                         [5, 0, 1], 'test')

                images[:, :, :, ii // image_modulus - 1] = img
                RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
                R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)

            # do some image processing and visual odometry
            ii += 1

        image_data.create_dataset('K', data=K)
        image_data.create_dataset('i_state', data=i_state)
        image_data.create_dataset('time', data=time)


def blender_inertial_lissajous(gen_images=False):
    """Move around the asteroid in the inertial frame, but assume no rotation
    of the asteroid
    """
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_high'
    # create a HDF5 dataset
    hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation_lissajous.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 1
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 3600 * 2
    num_steps = 3600 * 2
    loops = 2

    periodic_pos = np.array([1.495746722510590, 0.000001002669660, 0.006129720493607])
    periodic_vel = np.array([0.000000302161724, -0.000899607989820, -0.000000013286327])

    ast = asteroid.Asteroid(ast_name, num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)

    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)

    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)

    # set initial state for inertial EOMs
    initial_pos = np.array([3, 3, 0])  # km for center of mass in body frame
    initial_vel = periodic_vel + attitude.hat_map(ast.omega * np.array([0, 0, 1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi).reshape(9)  # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))

    # instantiate ode object
    system = integrate.ode(eoms.eoms_controlled_inertial_lissajous)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast, tf, loops)

    i_state = np.zeros((num_steps + 1, 18))
    time = np.zeros(num_steps + 1)
    i_state[0, :] = initial_state

    with h5py.File(hdf5_path, 'a') as image_data:
        # create a dataset
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus),
                                               dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))

        ii = 1
        while system.successful() and system.t < tf:
            # integrate the system and save state to an array
            time[ii] = (system.t + dt)
            i_state[ii, :] = (system.integrate(system.t + dt))

            # generate the view of the asteroid at this state
            if int(time[ii]) % image_modulus == 0 and gen_images:
                # img, RT, R = blender.gen_image(i_state[ii, 0:3], i_state[ii, 6:15].reshape((3, 3)),
                #                                ast.omega * time[ii],
                #                                camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene,
                #                                [5, 0, 1], 'test')
                img, RT, R = blender.gen_image_fixed_ast(i_state[ii, 0:3], i_state[ii, 6:15].reshape((3, 3)),
                                                         camera_obj, camera, lamp_obj, lamp,
                                                         itokawa_obj, scene,
                                                         [5, 0, 1], 'test')

                images[:, :, :, ii // image_modulus - 1] = img
                RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
                R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)

            # do some image processing and visual odometry
            ii += 1

        image_data.create_dataset('K', data=K)
        image_data.create_dataset('i_state', data=i_state)
        image_data.create_dataset('time', data=time)


def blender_inertial_quarter_equatorial(gen_images=False):
    """Move around the asteroid in the inertial frame, but assume no rotation
    of the asteroid

    Moves in the xy positive quadrant in the equatorial plane
    """
    # simulation parameters
    output_path = './visualization/blender'
    asteroid_name = 'itokawa_high'
    # create a HDF5 dataset
    hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation_quarter_xy.hdf5'.format(
        datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
    dataset_name = 'landing'
    render = 'BLENDER'
    image_modulus = 1
    RelTol = 1e-6
    AbsTol = 1e-6
    ast_name = 'itokawa'
    num_faces = 64
    t0 = 0
    dt = 1
    tf = 3600 * 4
    num_steps = 3600 * 4
    loops = 4

    periodic_pos = np.array([1.495746722510590, 0.000001002669660, 0.006129720493607])
    periodic_vel = np.array([0.000000302161724, -0.000899607989820, -0.000000013286327])

    ast = asteroid.Asteroid(ast_name, num_faces)
    dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003)

    # instantiate the blender scene once
    camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name)

    # get some of the camera parameters
    K = blender_camera.get_calibration_matrix_K_from_blender(camera)

    # set initial state for inertial EOMs
    initial_pos = np.array([3, 0, 0])  # km for center of mass in body frame
    initial_vel = periodic_vel + attitude.hat_map(ast.omega * np.array([0, 0, 1])).dot(initial_pos)
    initial_R = attitude.rot3(np.pi).reshape(9)  # transforms from dumbbell body frame to the inertial frame
    initial_w = np.array([0.01, 0.01, 0.01])
    initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w))

    # instantiate ode object
    system = integrate.ode(eoms.eoms_controlled_inertial_quarter_equatorial)
    system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000)
    system.set_initial_value(initial_state, t0)
    system.set_f_params(dum, ast, tf, loops)

    i_state = np.zeros((num_steps + 1, 18))
    time = np.zeros(num_steps + 1)
    i_state[0, :] = initial_state

    with h5py.File(hdf5_path, 'a') as image_data:
        # create a dataset
        if gen_images:
            images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps // image_modulus), dtype='uint8')
            RT_blender = image_data.create_dataset('RT', (num_steps // image_modulus, 12))
            R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps // image_modulus, 9))

        ii = 1
        while system.successful() and system.t < tf:
            # integrate the system and save state to an array
            time[ii] = (system.t + dt)
            i_state[ii, :] = (system.integrate(system.t + dt))

            # generate the view of the asteroid at this state
            if int(time[ii]) % image_modulus == 0 and gen_images:
                # img, RT, R = blender.gen_image(i_state[ii, 0:3], i_state[ii, 6:15].reshape((3, 3)),
                #                                ast.omega * time[ii],
                #                                camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene,
                #                                [5, 0, 1], 'test')
                img, RT, R = blender.gen_image_fixed_ast(i_state[ii, 0:3], i_state[ii, 6:15].reshape((3, 3)),
                                                         camera_obj, camera, lamp_obj, lamp,
                                                         itokawa_obj, scene,
                                                         [5, 0, 1], 'test')

                images[:, :, :, ii // image_modulus - 1] = img
                RT_blender[ii // image_modulus - 1, :] = RT.reshape(12)
                R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9)

            # do some image processing and visual odometry
            ii += 1

        image_data.create_dataset('K', data=K)
        image_data.create_dataset('i_state', data=i_state)
        image_data.create_dataset('time', data=time)<|fim▁end|>
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# M2Crypto documentation build configuration file, created by
# sphinx-quickstart on Thu Apr 20 11:15:12 2017.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys, os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join('..')))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'M2Crypto'
copyright = u'2017, Matej Cepl <[email protected]>'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []


# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'M2Cryptodoc'


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'M2Crypto.tex', u'M2Crypto Documentation',
   u'Matej Cepl \\textless{}[email protected]\\textgreater{}', 'manual'),<|fim▁hole|>
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'm2crypto', u'M2Crypto Documentation',
     [u'Matej Cepl <[email protected]>'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'M2Crypto', u'M2Crypto Documentation',
   u'Matej Cepl <[email protected]>', 'M2Crypto', 'One line description of project.',
   'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'


# -- Options for Epub output ---------------------------------------------------

# Bibliographic Dublin Core info.
epub_title = u'M2Crypto'
epub_author = u'Matej Cepl <[email protected]>'
epub_publisher = u'Matej Cepl <[email protected]>'
epub_copyright = u'2017, Matej Cepl <[email protected]>'

# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''

# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''

# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''

# A unique identification for the text.
#epub_uid = ''

# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()

# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []

# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []

# A list of files that should not be packed into the epub file.
#epub_exclude_files = []

# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3

# Allow duplicate toc entries.
#epub_tocdup = True<|fim▁end|>
]
<|file_name|>TimeLineWidget.cpp<|end_file_name|><|fim▁begin|>/*
 * TimeLineWidget.cpp - class timeLine, representing a time-line with position marker
 *
 * Copyright (c) 2004-2014 Tobias Doerffel <tobydox/at/users.sourceforge.net>
 *
 * This file is part of LMMS - https://lmms.io
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public
 * License along with this program (see COPYING); if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301 USA.
 *
 */

#include <QDomElement>
#include <QTimer>
#include <QApplication>
#include <QLayout>
#include <QMouseEvent>
#include <QPainter>
#include <QToolBar>

#include "TimeLineWidget.h"
#include "embed.h"
#include "NStateButton.h"
#include "GuiApplication.h"
#include "TextFloat.h"
#include "SongEditor.h"


QPixmap * TimeLineWidget::s_posMarkerPixmap = nullptr;

TimeLineWidget::TimeLineWidget( const int xoff, const int yoff, const float ppb,
			Song::PlayPos & pos, const TimePos & begin,
			Song::PlayModes mode, QWidget * parent ) :
	QWidget( parent ),
	m_inactiveLoopColor( 52, 63, 53, 64 ),
	m_inactiveLoopBrush( QColor( 255, 255, 255, 32 ) ),
	m_inactiveLoopInnerColor( 255, 255, 255, 32 ),
	m_activeLoopColor( 52, 63, 53, 255 ),
	m_activeLoopBrush( QColor( 55, 141, 89 ) ),
	m_activeLoopInnerColor( 74, 155, 100, 255 ),
	m_loopRectangleVerticalPadding( 1 ),
	m_barLineColor( 192, 192, 192 ),
	m_barNumberColor( m_barLineColor.darker( 120 ) ),
	m_autoScroll( AutoScrollEnabled ),
	m_loopPoints( LoopPointsDisabled ),
	m_behaviourAtStop( BackToZero ),
	m_changedPosition( true ),
	m_xOffset( xoff ),
	m_posMarkerX( 0 ),
	m_ppb( ppb ),
	m_pos( pos ),
	m_begin( begin ),
	m_mode( mode ),
	m_savedPos( -1 ),
	m_hint( nullptr ),
	m_action( NoAction ),
	m_moveXOff( 0 )
{
	m_loopPos[0] = 0;
	m_loopPos[1] = DefaultTicksPerBar;

	if( s_posMarkerPixmap == nullptr )
	{
		s_posMarkerPixmap = new QPixmap( embed::getIconPixmap( "playpos_marker" ) );
	}

	setAttribute( Qt::WA_OpaquePaintEvent, true );
	move( 0, yoff );

	m_xOffset -= s_posMarkerPixmap->width() / 2;

	setMouseTracking(true);
	m_pos.m_timeLine = this;

	QTimer * updateTimer = new QTimer( this );
	connect( updateTimer, SIGNAL( timeout() ),
			this, SLOT( updatePosition() ) );
	updateTimer->start( 1000 / 60 );  // 60 fps
	connect( Engine::getSong(), SIGNAL( timeSignatureChanged( int,int ) ),
			this, SLOT( update() ) );
}




TimeLineWidget::~TimeLineWidget()
{
	if( getGUI()->songEditor() )
	{
		m_pos.m_timeLine = nullptr;
	}
	delete m_hint;
}




void TimeLineWidget::setXOffset(const int x)
{
	m_xOffset = x;
	if (s_posMarkerPixmap != nullptr)
	{
		m_xOffset -= s_posMarkerPixmap->width() / 2;
	}
}




void TimeLineWidget::addToolButtons( QToolBar * _tool_bar )
{
	NStateButton * autoScroll = new NStateButton( _tool_bar );
	autoScroll->setGeneralToolTip( tr( "Auto scrolling" ) );
	autoScroll->addState( embed::getIconPixmap( "autoscroll_on" ) );
	autoScroll->addState( embed::getIconPixmap( "autoscroll_off" ) );
	connect( autoScroll, SIGNAL( changedState( int ) ), this,
					SLOT( toggleAutoScroll( int ) ) );

	NStateButton * loopPoints = new NStateButton( _tool_bar );
	loopPoints->setGeneralToolTip( tr( "Loop points" ) );
	loopPoints->addState( embed::getIconPixmap( "loop_points_off" ) );
	loopPoints->addState( embed::getIconPixmap( "loop_points_on" ) );
	connect( loopPoints, SIGNAL( changedState( int ) ), this,
					SLOT( toggleLoopPoints( int ) ) );
	connect( this, SIGNAL( loopPointStateLoaded( int ) ), loopPoints,
					SLOT( changeState( int ) ) );

	NStateButton * behaviourAtStop = new NStateButton( _tool_bar );
	behaviourAtStop->addState( embed::getIconPixmap( "back_to_zero" ),
					tr( "After stopping go back to beginning" ) );
	behaviourAtStop->addState( embed::getIconPixmap( "back_to_start" ),
					tr( "After stopping go back to "
						"position at which playing was "
						"started" ) );
	behaviourAtStop->addState( embed::getIconPixmap( "keep_stop_position" ),
					tr( "After stopping keep position" ) );
	connect( behaviourAtStop, SIGNAL( changedState( int ) ), this,
					SLOT( toggleBehaviourAtStop( int ) ) );
	connect( this, SIGNAL( loadBehaviourAtStop( int ) ), behaviourAtStop,
					SLOT( changeState( int ) ) );
	behaviourAtStop->changeState( BackToStart );

	_tool_bar->addWidget( autoScroll );
	_tool_bar->addWidget( loopPoints );
	_tool_bar->addWidget( behaviourAtStop );
}




void TimeLineWidget::saveSettings( QDomDocument & _doc, QDomElement & _this )
{
	_this.setAttribute( "lp0pos", (int) loopBegin() );
	_this.setAttribute( "lp1pos", (int) loopEnd() );
	_this.setAttribute( "lpstate", m_loopPoints );
	_this.setAttribute( "stopbehaviour", m_behaviourAtStop );
}




void TimeLineWidget::loadSettings( const QDomElement & _this )
{
	m_loopPos[0] = _this.attribute( "lp0pos" ).toInt();
	m_loopPos[1] = _this.attribute( "lp1pos" ).toInt();
	m_loopPoints = static_cast<LoopPointStates>(
					_this.attribute( "lpstate" ).toInt() );
	update();
	emit loopPointStateLoaded( m_loopPoints );

	if( _this.hasAttribute( "stopbehaviour" ) )
	{
		emit loadBehaviourAtStop( _this.attribute( "stopbehaviour" ).toInt() );
	}
}




void TimeLineWidget::updatePosition( const TimePos & )
{
	const int new_x = markerX( m_pos );

	if( new_x != m_posMarkerX )
	{
		m_posMarkerX = new_x;
		m_changedPosition = true;
		emit positionChanged( m_pos );
		update();
	}
}




void TimeLineWidget::toggleAutoScroll( int _n )
{
	m_autoScroll = static_cast<AutoScrollStates>( _n );
}




void TimeLineWidget::toggleLoopPoints( int _n )
{
	m_loopPoints = static_cast<LoopPointStates>( _n );
	update();
}




void TimeLineWidget::toggleBehaviourAtStop( int _n )
{
	m_behaviourAtStop = static_cast<BehaviourAtStopStates>( _n );
}




void TimeLineWidget::paintEvent( QPaintEvent * )
{
	QPainter p( this );

	// Draw background
	p.fillRect( 0, 0, width(), height(), p.background() );

	// Clip so that we only draw everything starting from the offset
	const int leftMargin = m_xOffset + s_posMarkerPixmap->width() / 2;
	p.setClipRect(leftMargin, 0, width() - leftMargin, height() );

	// Draw the loop rectangle
	int const & loopRectMargin = getLoopRectangleVerticalPadding();
	int const loopRectHeight = this->height() - 2 * loopRectMargin;
	int const loopStart = markerX( loopBegin() ) + 8;
	int const loopEndR = markerX( loopEnd() ) + 9;
	int const loopRectWidth = loopEndR - loopStart;

	bool const loopPointsActive = loopPointsEnabled();

	// Draw the main rectangle (inner fill only)
	QRect outerRectangle( loopStart, loopRectMargin, loopRectWidth - 1, loopRectHeight - 1 );
	p.fillRect( outerRectangle, loopPointsActive ? getActiveLoopBrush() : getInactiveLoopBrush());

	// Draw the bar lines and numbers
	// Activate hinting on the font
	QFont font = p.font();
	font.setHintingPreference( QFont::PreferFullHinting );
	p.setFont(font);
	int const fontAscent = p.fontMetrics().ascent();
	int const fontHeight = p.fontMetrics().height();<|fim▁hole|>

	QColor const & barLineColor = getBarLineColor();
	QColor const & barNumberColor = getBarNumberColor();

	bar_t barNumber = m_begin.getBar();
	int const x = m_xOffset + s_posMarkerPixmap->width() / 2 -
			( ( static_cast<int>( m_begin * m_ppb ) / TimePos::ticksPerBar() ) %
						static_cast<int>( m_ppb ) );

	for( int i = 0; x + i * m_ppb < width(); ++i )
	{
		++barNumber;
		if( ( barNumber - 1 ) %
			qMax( 1, qRound( 1.0f / 3.0f *
				TimePos::ticksPerBar() / m_ppb ) ) == 0 )
		{
			const int cx = x + qRound( i * m_ppb );
			p.setPen( barLineColor );
			p.drawLine( cx, 5, cx, height() - 6 );

			const QString s = QString::number( barNumber );
			p.setPen( barNumberColor );
			p.drawText( cx + 5,
				((height() - fontHeight) / 2) + fontAscent, s );
		}
	}

	// Draw the main rectangle (outer border)
	p.setPen( loopPointsActive ? getActiveLoopColor() : getInactiveLoopColor() );
	p.setBrush( Qt::NoBrush );
	p.drawRect( outerRectangle );

	// Draw the inner border outline (no fill)
	QRect innerRectangle = outerRectangle.adjusted( 1, 1, -1, -1 );
	p.setPen( loopPointsActive ? getActiveLoopInnerColor() : getInactiveLoopInnerColor() );
	p.setBrush( Qt::NoBrush );
	p.drawRect( innerRectangle );

	// Only draw the position marker if the position line is in view
	if (m_posMarkerX >= m_xOffset && m_posMarkerX < width() - s_posMarkerPixmap->width() / 2)
	{
		// Let the position marker extrude to the left
		p.setClipping(false);
		p.setOpacity(0.6);
		p.drawPixmap(m_posMarkerX, height() - s_posMarkerPixmap->height(), *s_posMarkerPixmap);
	}
}




void TimeLineWidget::mousePressEvent( QMouseEvent* event )
{
	if( event->x() < m_xOffset )
	{
		return;
	}
	if( event->button() == Qt::LeftButton && !(event->modifiers() & Qt::ShiftModifier) )
	{
		m_action = MovePositionMarker;
		if( event->x() - m_xOffset < s_posMarkerPixmap->width() )
		{
			m_moveXOff = event->x() - m_xOffset;
		}
		else
		{
			m_moveXOff = s_posMarkerPixmap->width() / 2;
		}
	}
	else if( event->button() == Qt::LeftButton && (event->modifiers() & Qt::ShiftModifier) )
	{
		m_action = SelectSongTCO;
		m_initalXSelect = event->x();
	}
	else if( event->button() == Qt::RightButton )
	{
		m_moveXOff = s_posMarkerPixmap->width() / 2;
		const TimePos t = m_begin + static_cast<int>( qMax( event->x() - m_xOffset - m_moveXOff, 0 ) *
							TimePos::ticksPerBar() / m_ppb );
		const TimePos loopMid = ( m_loopPos[0] + m_loopPos[1] ) / 2;

		if( t < loopMid )
		{
			m_action = MoveLoopBegin;
		}
		else if( t > loopMid )
		{
			m_action = MoveLoopEnd;
		}

		if( m_loopPos[0] > m_loopPos[1] )
		{
			qSwap( m_loopPos[0], m_loopPos[1] );
		}
		m_loopPos[( m_action == MoveLoopBegin ) ? 0 : 1] = t;
	}

	if( m_action == MoveLoopBegin || m_action == MoveLoopEnd )
	{
		delete m_hint;
		m_hint = TextFloat::displayMessage( tr( "Hint" ),
				tr( "Press <%1> to disable magnetic loop points." ).arg(UI_CTRL_KEY),
				embed::getIconPixmap( "hint" ), 0 );
	}
	mouseMoveEvent( event );
}




void TimeLineWidget::mouseMoveEvent( QMouseEvent* event )
{
	parentWidget()->update(); // essential for widgets that this timeline had taken their mouse move event from.

	const TimePos t = m_begin + static_cast<int>( qMax( event->x() - m_xOffset - m_moveXOff, 0 ) *
						TimePos::ticksPerBar() / m_ppb );

	switch( m_action )
	{
		case MovePositionMarker:
			m_pos.setTicks(t.getTicks());
			Engine::getSong()->setToTime(t, m_mode);
			if (!( Engine::getSong()->isPlaying()))
			{
				//Song::Mode_None is used when nothing is being played.
				Engine::getSong()->setToTime(t, Song::Mode_None);
			}
			m_pos.setCurrentFrame( 0 );
			m_pos.setJumped( true );
			updatePosition();
			positionMarkerMoved();
			break;

		case MoveLoopBegin:
		case MoveLoopEnd:
		{
			const int i = m_action - MoveLoopBegin; // i == 0 || i == 1
			if( event->modifiers() & Qt::ControlModifier )
			{
				// no ctrl-press-hint when having ctrl pressed
				delete m_hint;
				m_hint = nullptr;
				m_loopPos[i] = t;
			}
			else
			{
				m_loopPos[i] = t.quantize(1.0);
			}
			// Catch begin == end
			if( m_loopPos[0] == m_loopPos[1] )
			{
				// Note, swap 1 and 0 below and the behavior "skips" the other
				// marking instead of pushing it.
				if( m_action == MoveLoopBegin )
				{
					m_loopPos[0] -= TimePos::ticksPerBar();
				}
				else
				{
					m_loopPos[1] += TimePos::ticksPerBar();
				}
			}
			update();
			break;
		}

		case SelectSongTCO:
			emit regionSelectedFromPixels( m_initalXSelect, event->x() );
			break;

		default:
			break;
	}
}




void TimeLineWidget::mouseReleaseEvent( QMouseEvent* event )
{
	delete m_hint;
	m_hint = nullptr;
	if ( m_action == SelectSongTCO )
	{
		emit selectionFinished();
	}
	m_action = NoAction;
}<|fim▁end|>