max_stars_count: int64, 301 to 224k
text: string, lengths 6 to 1.05M
token_count: int64, 3 to 727k
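Each row below follows the column order above: the sample's max_stars_count, the raw text of the source file (flattened onto a single line), and its token_count. As a minimal, illustrative sketch (assuming the rows have been parsed into Python dictionaries keyed by the column names above; the helper name and thresholds are hypothetical, not part of the dump), filtering rows by size and popularity might look like this:

# Minimal sketch: keep only short samples from well-starred repositories.
# Assumes each row is a dict with the three columns listed above; the two
# example rows reuse values from the first two rows of the dump, with the
# text shortened to its opening characters.
rows = [
    {"max_stars_count": 341, "token_count": 283,
     "text": '"""Unit tests for the application."""'},
    {"max_stars_count": 463, "token_count": 46,
     "text": "# pylint: skip-file"},
]

def select_rows(rows, max_tokens=1000, min_stars=300):
    """Yield rows whose text is short enough and whose repo is popular enough."""
    for row in rows:
        if row["token_count"] <= max_tokens and row["max_stars_count"] >= min_stars:
            yield row

for row in select_rows(rows):
    print(row["max_stars_count"], row["token_count"], row["text"][:40])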
341
"""Unit tests for the application.""" from flask import current_app from flask import request from deepchat import create_app, db from deepchat.models import User, Conversation, Chatbot, Turn import sys import unittest import sqlite3 import sqlalchemy class TestDatabase(unittest.TestCase): def setUp(self): """Called before running a test.""" self.app = create_app('testing') self.app_context = self.app.app_context() self.app_context.push() db.create_all() def tearDown(self): """Called after running a test.""" db.session.remove() db.drop_all() self.app_context.pop() def test_app_exists(self): self.assertFalse(current_app is None)
283
463
# pylint: skip-file """disable-all is usable as an inline option""" # no warning should be issued try: import this except: pass
46
639
// Copyright (c) Microsoft Corporation. // Licensed under the MIT License. #pragma once #include <set> #include <sqlite3.h> #include "nnfusion/common/common.hpp" namespace nnfusion { namespace cache { // presently only kernel cache database supported // Todo: integrate the interfaces of profiling cache database struct KernelEntry { std::string key; std::string identifier; std::string op_type; nlohmann::json attributes; std::string source; std::string device_type; nlohmann::json function; std::set<std::string> tags; nlohmann::json miscs; std::map<std::string, float> profile; int resource; KernelEntry() { key = ""; identifier = ""; op_type = ""; attributes = nlohmann::json(); source = ""; device_type = ""; function = nlohmann::json(); tags.clear(); miscs = nlohmann::json(); } }; using KernelEntry_p = std::shared_ptr<KernelEntry>; class KernelCacheManager { public: KernelCacheManager(); ~KernelCacheManager(); std::vector<KernelEntry_p> fetch_all(std::string identifier, std::string device_type); KernelEntry_p fetch_with_tags(std::string identifier, std::string device_type, std::set<std::string> tags, bool efficient = false); std::vector<KernelEntry_p> fetch_with_source(std::string identifier, std::string device_type, std::string source); bool insert_kernel_entry(const KernelEntry_p kernel_entry, bool overwrite = false); bool is_valid() { return kernel_cache != nullptr; } public: // TODO(lingm): SupportOpList depends on the correctness of the KernelContext identifier static std::unordered_set<std::string> SupportOpList; private: std::string m_path; static sqlite3* kernel_cache; }; } //namespace cache } //namespace nnfusion
1,259
3,227
<filename>Three/include/CGAL/Three/Scene_zoomable_item_interface.h // Copyright (c) 2009,2010,2012,2015 GeometryFactory Sarl (France) // All rights reserved. // // This file is part of CGAL (www.cgal.org). // // $URL$ // $Id$ // SPDX-License-Identifier: GPL-3.0-or-later OR LicenseRef-Commercial // // Author(s) : <NAME> #ifndef SCENE_ZOOMABLE_ITEM_INTERFACE_H #define SCENE_ZOOMABLE_ITEM_INTERFACE_H #include <CGAL/license/Three.h> #include <QtPlugin> #include <QPoint> namespace CGAL { namespace Three { class Viewer_interface; //! This class provides a function to move the camera orthogonaly to the wanted region class Scene_zoomable_item_interface { public: virtual ~Scene_zoomable_item_interface(){} //! Move the camera orthogonaly to the region defined by `point` virtual void zoomToPosition(const QPoint& point, CGAL::Three::Viewer_interface*)const = 0; }; } } Q_DECLARE_INTERFACE(CGAL::Three::Scene_zoomable_item_interface, "com.geometryfactory.PolyhedronDemo.ZoomInterface/1.0") #endif // SCENE_ZOOMABLE_ITEM_INTERFACE_H
380
5,169
{ "name": "FluxMobileStaging", "version": "0.2.0", "summary": "Integrate FluxPanda into your mobile app", "description": "More details at https://www.fluxpanda.com", "homepage": "https://github.com/PrototypeInteractive/stream-shop-mobile-sdk-ios", "license": { "type": "All rights reserved", "file": "LICENSE" }, "authors": { "Flux Panda Inc.": "<EMAIL>" }, "platforms": { "ios": "11.0" }, "source": { "git": "https://github.com/PrototypeInteractive/stream-shop-mobile-sdk-ios.git", "tag": "0.2.0" }, "vendored_frameworks": "FluxMobileStaging.xcframework" }
250
868
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved. // // Licensed under the BSD 3-Clause License (the "License"); you may not use this // file except in compliance with the License. You may obtain a copy of the // License at // // https://opensource.org/licenses/BSD-3-Clause // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the // License for the specific language governing permissions and limitations under // the License. #ifndef FLARE_BASE_INTERNAL_COPYABLE_ATOMIC_H_ #define FLARE_BASE_INTERNAL_COPYABLE_ATOMIC_H_ #include <atomic> namespace flare::internal { // Make `std::atomic<T>` copyable. template <class T> class CopyableAtomic : public std::atomic<T> { public: CopyableAtomic() = default; /* implicit */ CopyableAtomic(T value) : std::atomic<T>(std::move(value)) {} constexpr CopyableAtomic(const CopyableAtomic& from) : std::atomic<T>(from.load()) {} constexpr CopyableAtomic& operator=(const CopyableAtomic& from) { store(from.load()); return *this; } }; } // namespace flare::internal #endif // FLARE_BASE_INTERNAL_COPYABLE_ATOMIC_H_
414
780
package com.codeborne.selenide.commands; import com.codeborne.selenide.SelenideElement; import com.codeborne.selenide.impl.WebElementSource; import org.junit.jupiter.api.Test; import org.openqa.selenium.WebElement; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; final class ToWebElementCommandTest { private final SelenideElement proxy = mock(SelenideElement.class); private final WebElementSource locator = mock(WebElementSource.class); private final ToWebElement command = new ToWebElement(); private final WebElement webElement = mock(WebElement.class); @Test void returnsUnderlyingWebElement() { when(locator.getWebElement()).thenReturn(webElement); assertThat(command.execute(proxy, locator, new Object[]{})).isEqualTo(webElement); verify(locator).getWebElement(); } }
301
359
<gh_stars>100-1000 /* * Copyright (c) 2016. <NAME> <<EMAIL>> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.hibernate.cache.redis.hibernate52.util; import org.hibernate.SessionFactory; import org.hibernate.internal.SessionFactoryImpl; import org.hibernate.persister.entity.EntityPersister; /** * Hibernate 2nd cache Utility class * * @author <EMAIL> */ public final class HibernateCacheUtil { private HibernateCacheUtil() { } public static String getRegionName(SessionFactory sessionFactory, Class entityClass) { EntityPersister p = ((SessionFactoryImpl) sessionFactory).getEntityPersister(entityClass.getName()); if (p.hasCache()) { return p.getCacheAccessStrategy().getRegion().getName(); } return ""; } }
428
769
// Copyright 2017 ETH Zurich and University of Bologna. // Copyright and related rights are licensed under the Solderpad Hardware // License, Version 0.51 (the “License”); you may not use this file except in // compliance with the License. You may obtain a copy of the License at // http://solderpad.org/licenses/SHL-0.51. Unless required by applicable law // or agreed to in writing, software, hardware and materials distributed under // this License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. #include <main.cpp> unsigned int x; void setup() { Serial.begin(781250); pinMode(4,INPUT); pinMode(0,OUTPUT); //used to intiate test bench stimulus (for simulation only) digitalWrite(0,HIGH); } void loop() { x= pulseIn(4,1); Serial.print("Calculated\t"); Serial.print(x); Serial.print("\texpected\t"); Serial.println("500"); x= pulseInLong(4,1); Serial.print("Calculated\t"); Serial.print(x); Serial.print("\texpected\t"); Serial.println("500"); delay(1); exit(0); //to end simulation properly }
379
14,668
<reponame>chromium/chromium // Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef ASH_COMPONENTS_ARC_MEMORY_ARC_MEMORY_BRIDGE_H_ #define ASH_COMPONENTS_ARC_MEMORY_ARC_MEMORY_BRIDGE_H_ #include "ash/components/arc/mojom/memory.mojom.h" #include "base/callback_forward.h" #include "base/threading/thread_checker.h" #include "components/keyed_service/core/keyed_service.h" namespace content { class BrowserContext; } // namespace content namespace arc { class ArcBridgeService; // Collects information from other ArcServices and send UMA metrics. class ArcMemoryBridge : public KeyedService { public: // Returns singleton instance for the given BrowserContext, // or nullptr if the browser |context| is not allowed to use ARC. static ArcMemoryBridge* GetForBrowserContext( content::BrowserContext* context); static ArcMemoryBridge* GetForBrowserContextForTesting( content::BrowserContext* context); ArcMemoryBridge(content::BrowserContext* context, ArcBridgeService* bridge_service); ArcMemoryBridge(const ArcMemoryBridge&) = delete; ArcMemoryBridge& operator=(const ArcMemoryBridge&) = delete; ~ArcMemoryBridge() override; // Drops the guest kernel's page caches. using DropCachesCallback = base::OnceCallback<void(bool)>; void DropCaches(DropCachesCallback callback); private: THREAD_CHECKER(thread_checker_); ArcBridgeService* const arc_bridge_service_; // Owned by ArcServiceManager. }; } // namespace arc #endif // ASH_COMPONENTS_ARC_MEMORY_ARC_MEMORY_BRIDGE_H_
533
480
/* * Copyright [2013-2021], Alibaba Group Holding Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.polardbx.optimizer.config.meta; import com.alibaba.polardbx.common.utils.logger.Logger; import com.alibaba.polardbx.common.utils.logger.LoggerFactory; import com.alibaba.polardbx.optimizer.core.rel.LogicalView; import com.alibaba.polardbx.optimizer.core.rel.MysqlTableScan; import com.alibaba.polardbx.optimizer.view.ViewPlan; import org.apache.calcite.plan.volcano.RelSubset; import org.apache.calcite.rel.core.TableLookup; import org.apache.calcite.rel.core.Window; import org.apache.calcite.rel.metadata.ReflectiveRelMetadataProvider; import org.apache.calcite.rel.metadata.RelColumnOrigin; import org.apache.calcite.rel.metadata.RelMdColumnOrigins; import org.apache.calcite.rel.metadata.RelMetadataProvider; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.util.BuiltInMethod; import java.util.Set; public class DrdsRelMdColumnOrigins extends RelMdColumnOrigins { /** * make sure you have overridden the SOURCE */ public static final RelMetadataProvider SOURCE = ReflectiveRelMetadataProvider.reflectiveSource( BuiltInMethod.COLUMN_ORIGIN.method, new DrdsRelMdColumnOrigins()); private final static Logger logger = LoggerFactory.getLogger(DrdsRelMdColumnOrigins.class); public Set<RelColumnOrigin> getColumnOrigins(LogicalView rel, RelMetadataQuery mq, int iOutputColumn) { return rel.getColumnOrigins(mq, iOutputColumn); } public Set<RelColumnOrigin> getColumnOrigins(RelSubset rel, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(rel.getOriginal(), iOutputColumn); } public Set<RelColumnOrigin> getColumnOrigins(TableLookup tableLookup, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(tableLookup.getProject(), iOutputColumn); } public Set<RelColumnOrigin> getColumnOrigins(ViewPlan viewPlan, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(viewPlan.getPlan(), iOutputColumn); } public Set<RelColumnOrigin> getColumnOrigins(MysqlTableScan mysqlTableScan, RelMetadataQuery mq, int iOutputColumn) { return mq.getColumnOrigins(mysqlTableScan.getNodeForMetaQuery(), iOutputColumn); } public Set<RelColumnOrigin> getColumnOrigins(Window rel, RelMetadataQuery mq, int iOutputColumn) { if (iOutputColumn < rel.getInput().getRowType().getFieldCount()) { return mq.getColumnOrigins(rel.getInput(), iOutputColumn); } return null; } }
1,215
1,546
<reponame>skylerpfli/libpag<gh_stars>1000+ ///////////////////////////////////////////////////////////////////////////////////////////////// // // Tencent is pleased to support the open source community by making libpag available. // // Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file // except in compliance with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // unless required by applicable law or agreed to in writing, software distributed under the // license is distributed on an "as is" basis, without warranties or conditions of any kind, // either express or implied. see the license for the specific language governing permissions // and limitations under the license. // ///////////////////////////////////////////////////////////////////////////////////////////////// #include "GLGeometryProcessor.h" namespace tgfx { void GLGeometryProcessor::setTransformDataHelper(const Matrix& localMatrix, const ProgramDataManager& programDataManager, FPCoordTransformIter* transformIter) { int i = 0; while (const CoordTransform* coordTransform = transformIter->next()) { Matrix combined = Matrix::I(); combined.setConcat(coordTransform->matrix, localMatrix); auto& uniform = installedTransforms[i]; if (!uniform.updated || uniform.currentMatrix != combined) { uniform.updated = true; uniform.currentMatrix = combined; programDataManager.setMatrix(uniform.handle, combined); } ++i; } } void GLGeometryProcessor::emitTransforms(VertexShaderBuilder* vertexBuilder, VaryingHandler* varyingHandler, UniformHandler* uniformHandler, const ShaderVar& localCoordsVar, FPCoordTransformHandler* transformHandler) { std::string localCoords = "vec3("; localCoords += localCoordsVar.name(); localCoords += ", 1)"; int i = 0; while (transformHandler->nextCoordTransform() != nullptr) { std::string strUniName = "CoordTransformMatrix_"; strUniName += std::to_string(i); std::string uniName; TransformUniform transformUniform; transformUniform.handle = uniformHandler->addUniform( ShaderFlags::Vertex, ShaderVar::Type::Float3x3, strUniName, &uniName); installedTransforms.push_back(transformUniform); std::string strVaryingName = "TransformedCoords_"; strVaryingName += std::to_string(i); ShaderVar::Type varyingType = ShaderVar::Type::Float2; auto varying = varyingHandler->addVarying(strVaryingName, varyingType); transformHandler->specifyCoordsForCurrCoordTransform(varying.name(), varyingType); vertexBuilder->codeAppendf("%s = (%s * %s).xy;", varying.vsOut().c_str(), uniName.c_str(), localCoords.c_str()); ++i; } } } // namespace tgfx
1,148
310
<filename>gear/hardware/s/superego.json { "name": "SUPEREGO", "description": "An effects synth.", "url": "https://www.ehx.com/products/superego" }
64
360
<filename>cfripper/rules/sns_topic_policy.py __all__ = ["SNSTopicPolicyNotPrincipalRule", "SNSTopicDangerousPolicyActionsRule"] from typing import Dict, Optional from pycfmodel.model.resources.sns_topic_policy import SNSTopicPolicy from cfripper.model.enums import RuleGranularity, RuleRisk from cfripper.model.result import Result from cfripper.rules.base_rules import BaseDangerousPolicyActions, ResourceSpecificRule class SNSTopicPolicyNotPrincipalRule(ResourceSpecificRule): """ Checks if an SNS topic policy has an Allow + a NotPrincipal. Risk: AWS **strongly** recommends against using `NotPrincipal` in the same policy statement as `"Effect": "Allow"`. Doing so grants the permissions specified in the policy statement to all principals except the one named in the `NotPrincipal` element. By doing this, you might grant access to anonymous (unauthenticated) users. Filters context: | Parameter | Type | Description | |:-----------------------:|:--------------------------------:|:--------------------------------------------------------------:| |`config` | str | `config` variable available inside the rule | |`extras` | str | `extras` variable available inside the rule | |`logical_id` | str | ID used in Cloudformation to refer the resource being analysed | |`resource` | `SNSTopicPolicy` | Resource that is being addressed | |`statement` | `Statement` | Statement being checked found in the Resource | """ GRANULARITY = RuleGranularity.RESOURCE REASON = "SNS Topic policy {} should not allow Allow and NotPrincipal at the same time" RESOURCE_TYPES = (SNSTopicPolicy,) def resource_invoke(self, resource: SNSTopicPolicy, logical_id: str, extras: Optional[Dict] = None) -> Result: result = Result() for statement in resource.Properties.PolicyDocument._statement_as_list(): if statement.NotPrincipal: self.add_failure_to_result( result, self.REASON.format(logical_id), resource_ids={logical_id}, context={ "config": self._config, "extras": extras, "logical_id": logical_id, "resource": resource, "statement": statement, }, ) return result class SNSTopicDangerousPolicyActionsRule(BaseDangerousPolicyActions): f""" Checks for dangerous permissions in Action statements in an SNS Topic Policy. Risk: This is deemed a potential security risk as it could allow privilege escalation. {BaseDangerousPolicyActions.DEFAULT_FILTERS_CONTEXT} """ REASON = "SNS Topic policy {} should not not include the following dangerous actions: {}" RISK_VALUE = RuleRisk.MEDIUM RESOURCE_TYPES = (SNSTopicPolicy,) DANGEROUS_ACTIONS = [ "sns:AddPermission", "sns:RemovePermission", "sns:TagResource", "sns:UntagResource", ]
1,551
342
<reponame>None1637/osu-droid-1 package ru.nsu.ccfit.zuev.osu.helper; import org.anddev.andengine.engine.camera.Camera; import org.anddev.andengine.entity.sprite.Sprite; import org.anddev.andengine.opengl.texture.region.TextureRegion; import javax.microedition.khronos.opengles.GL10; import ru.nsu.ccfit.zuev.osu.ResourceManager; public class AnimSprite extends Sprite { public enum LoopType { STOP, // stop at last frame LOOP, // loop from start DISAPPEAR, // disappear after last frame FROZE // do not automatically update frame } private final int count; private final TextureRegion[] regions; private int frame; private float animTime; private float fps; private LoopType loopType = LoopType.LOOP; public AnimSprite(final float px, final float py, final String texname, int count, final float fps) { super(px, py, ResourceManager.getInstance().getTexture(texname + "0")); if (count == 0) { count = 1; } this.count = count; this.fps = fps; this.frame = 0; this.animTime = 0; regions = new TextureRegion[count]; for (int i = 0; i < count; i++) { regions[i] = ResourceManager.getInstance().getTexture(texname + i); } if (fps == 0) { loopType = LoopType.FROZE; } } public AnimSprite(final float px, final float py, final float fps, final String... textures) { super(px, py, ResourceManager.getInstance().getTextureIfLoaded(textures[0])); this.count = textures.length; this.fps = fps; this.frame = 0; this.animTime = 0; regions = new TextureRegion[count]; for (int i = 0; i < count; i++) { regions[i] = ResourceManager.getInstance().getTextureIfLoaded(textures[i]); } if (fps == 0) { loopType = LoopType.FROZE; } } public void setLoopType(LoopType loopType) { this.loopType = loopType; } public LoopType getLoopType() { return loopType; } /** * Automatically update frame. * If loopType is {@link LoopType#FROZE} or fps is 0, this will do nothing */ private void updateFrame() { if (loopType == LoopType.FROZE || fps == 0) { return; } int frameByTime = (int) (this.animTime * fps); switch (loopType) { case LOOP: frame = frameByTime % count; break; case STOP: frame = Math.min(frameByTime, count - 1); break; case DISAPPEAR: frame = Math.min(frameByTime, count); break; default: break; } } /** * It's not recommended to call this method if you are not initialing this sprite */ public void setFps(final float fps) { frame = 0; this.fps = fps; } /** * Force set animation to target frame. 
* @param frame target frame */ public void setFrame(int frame) { if (this.loopType == LoopType.FROZE || fps == 0) { this.frame = frame; } else { this.animTime = (frame + 0.0001f) / fps; updateFrame(); } } public void setAnimTime(float animTime) { this.animTime = animTime; updateFrame(); } @Override protected void onManagedUpdate(final float pSecondsElapsed) { this.animTime += pSecondsElapsed; updateFrame(); super.onManagedUpdate(pSecondsElapsed); } @Override protected void doDraw(final GL10 pGL, final Camera pCamera) { if (regions.length == 0 || frame < 0 || frame >= regions.length) { return; } regions[frame].onApply(pGL); onInitDraw(pGL); onApplyVertices(pGL); drawVertices(pGL, pCamera); } @Override public void setFlippedHorizontal(final boolean pFlippedHorizontal) { for (final TextureRegion reg : regions) { reg.setFlippedHorizontal(pFlippedHorizontal); } } public float getFrameWidth() { if (frame < regions.length && frame >= 0) { return regions[frame].getWidth(); } else if (regions.length > 0) { return regions[0].getWidth(); } else { return 40; } } public void setTextureRegion(final int index, final TextureRegion region) { regions[index] = region; } public TextureRegion getTextureRegionAt(final int index) { return regions[index]; } public int getTextureRegionCount() { return regions.length; } }
2,158
3,212
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.rules; import java.util.Map; /** * An ActionHandler executes the provided {@link Action} for a given set of facts */ public interface ActionHandler { /** * Execute the given action for the provided facts * @param action The action that should be performed by the handler * @param facts The facts that triggered this action */ void execute(Action action, Map<String, Object> facts); }
321
736
/* * Copyright 2021 java-diff-utils. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.difflib.algorithm; import java.util.function.BiPredicate; /** * Tool to create new instances of a diff algorithm. This one is only needed at the moment to * set DiffUtils default diff algorithm. * @author tw */ public interface DiffAlgorithmFactory { <T> DiffAlgorithmI<T> create(); <T> DiffAlgorithmI<T> create(BiPredicate<T, T> equalizer); }
282
12,315
# -*- coding: utf-8 -*- """ Created on Sat May 30 13:24:01 2015 @author: rlabbe """ # -*- coding: utf-8 -*- """ Created on Thu May 28 20:23:57 2015 @author: rlabbe """ from math import cos, sin, sqrt, atan2, tan import matplotlib.pyplot as plt import numpy as np from numpy import array, dot from numpy.random import randn from filterpy.common import plot_covariance_ellipse from filterpy.kalman import ExtendedKalmanFilter as EKF from sympy import Matrix, symbols import sympy def print_x(x): print(x[0, 0], x[1, 0], np.degrees(x[2, 0])) def normalize_angle(x, index): if x[index] > np.pi: x[index] -= 2*np.pi if x[index] < -np.pi: x[index] = 2*np.pi def residual(a,b): y = a - b normalize_angle(y, 1) return y sigma_r = 1 sigma_h = .1#np.radians(1) sigma_steer = np.radians(1) class RobotEKF(EKF): def __init__(self, dt, wheelbase): EKF.__init__(self, 3, 2, 2) self.dt = dt self.wheelbase = wheelbase a, x, y, v, w, theta, time = symbols( 'a, x, y, v, w, theta, t') d = v*time beta = (d/w)*sympy.tan(a) r = w/sympy.tan(a) self.fxu = Matrix([[x-r*sympy.sin(theta)+r*sympy.sin(theta+beta)], [y+r*sympy.cos(theta)-r*sympy.cos(theta+beta)], [theta+beta]]) self.F_j = self.fxu.jacobian(Matrix([x, y, theta])) self.V_j = self.fxu.jacobian(Matrix([v, a])) self.subs = {x: 0, y: 0, v:0, a:0, time:dt, w:wheelbase, theta:0} self.x_x = x self.x_y = y self.v = v self.a = a self.theta = theta def predict(self, u=0): self.x = self.move(self.x, u, self.dt) self.subs[self.theta] = self.x[2, 0] self.subs[self.v] = u[0] self.subs[self.a] = u[1] F = array(self.F_j.evalf(subs=self.subs)).astype(float) V = array(self.V_j.evalf(subs=self.subs)).astype(float) # covariance of motion noise in control space M = array([[0.1*u[0]**2, 0], [0, sigma_steer**2]]) self.P = dot(F, self.P).dot(F.T) + dot(V, M).dot(V.T) def move(self, x, u, dt): h = x[2, 0] v = u[0] steering_angle = u[1] dist = v*dt if abs(steering_angle) < 0.0001: # approximate straight line with huge radius r = 1.e-30 b = dist / self.wheelbase * tan(steering_angle) r = self.wheelbase / tan(steering_angle) # radius sinh = sin(h) sinhb = sin(h + b) cosh = cos(h) coshb = cos(h + b) return x + array([[-r*sinh + r*sinhb], [r*cosh - r*coshb], [b]]) def H_of(x, p): """ compute Jacobian of H matrix where h(x) computes the range and bearing to a landmark 'p' for state x """ px = p[0] py = p[1] hyp = (px - x[0, 0])**2 + (py - x[1, 0])**2 dist = np.sqrt(hyp) H = array( [[(-px + x[0, 0]) / dist, (-py + x[1, 0]) / dist, 0.], [ -(-py + x[1, 0]) / hyp, -(px - x[0, 0]) / hyp, -1.]]) return H def Hx(x, p): """ takes a state variable and returns the measurement that would correspond to that state. """ px = p[0] py = p[1] dist = np.sqrt((px - x[0, 0])**2 + (py - x[1, 0])**2) Hx = array([[dist], [atan2(py - x[1, 0], px - x[0, 0]) - x[2, 0]]]) return Hx dt = 1.0 ekf = RobotEKF(dt, wheelbase=0.5) #np.random.seed(1234) m = array([[5, 10], [10, 5], [15, 15]]) ekf.x = array([[2, 6, .3]]).T ekf.P = np.diag([.1, .1, .1]) ekf.R = np.diag([sigma_r**2, sigma_h**2]) u = array([1.1, .01]) xp = ekf.x.copy() plt.figure() plt.scatter(m[:, 0], m[:, 1]) for i in range(250): xp = ekf.move(xp, u, dt/10.) 
# simulate robot plt.plot(xp[0], xp[1], ',', color='g') if i % 10 == 0: ekf.predict(u=u) plot_covariance_ellipse((ekf.x[0,0], ekf.x[1,0]), ekf.P[0:2, 0:2], std=3, facecolor='b', alpha=0.08) for lmark in m: d = sqrt((lmark[0] - xp[0, 0])**2 + (lmark[1] - xp[1, 0])**2) + randn()*sigma_r a = atan2(lmark[1] - xp[1, 0], lmark[0] - xp[0, 0]) - xp[2, 0] + randn()*sigma_h z = np.array([[d], [a]]) ekf.update(z, HJacobian=H_of, Hx=Hx, residual=residual, args=(lmark), hx_args=(lmark)) plot_covariance_ellipse((ekf.x[0,0], ekf.x[1,0]), ekf.P[0:2, 0:2], std=3, facecolor='g', alpha=0.4) #plt.plot(ekf.x[0], ekf.x[1], 'x', color='r') plt.axis('equal') plt.title("EKF Robot localization") plt.show()
2,641
3,940
//{{NO_DEPENDENCIES}} // Microsoft Visual C++ generated include file. // Used by ClassicIEDLL.rc // #define IDI_APPICON 1 #define IDR_CLASSICIEDLL 101 #define IDR_CLASSICIEBHO 102 #define IDB_BITMAP1 201 #define IDB_GLOW 201 #define IDS_APP_TITLE 5000 #define IDS_SETTINGS_TITLE 5001 #define IDS_SETTINGS_TITLE_VER 5002 #define IDS_NEW_SETTINGS 5003 #define IDS_TITLE_SETTINGS 5004 #define IDS_SHOW_CAPTION 5005 #define IDS_SHOW_CAPTION_TIP 5006 #define IDS_CENTER_CAPTION 5007 #define IDS_CENTER_CAPTION_TIP 5008 #define IDS_LANGUAGE_SETTINGS 5009 #define IDS_CAPTION_FONT 5010 #define IDS_CAPTION_FONT_TIP 5011 #define IDS_TEXT_COLOR 5012 #define IDS_TEXT_COLOR_TIP 5013 #define IDS_MAXTEXT_COLOR 5014 #define IDS_MAXTEXT_COLOR_TIP 5015 #define IDS_INTEXT_COLOR 5016 #define IDS_INTEXT_COLOR_TIP 5017 #define IDS_MAXINTEXT_COLOR 5018 #define IDS_MAXINTEXT_COLOR_TIP 5019 #define IDS_GLOW 5020 #define IDS_GLOW_TIP 5021 #define IDS_GLOW_COLOR 5022 #define IDS_GLOW_COLOR_TIP 5023 #define IDS_MAXGLOW 5024 #define IDS_MAXGLOW_TIP 5025 #define IDS_MAXGLOW_COLOR 5026 #define IDS_MAXGLOW_COLOR_TIP 5027 #define IDS_STATUS_SETTINGS 5028 #define IDS_SHOW_PROGRESS 5029 #define IDS_SHOW_PROGRESS_TIP 5030 #define IDS_SHOW_ZONE 5031 #define IDS_SHOW_ZONE_TIP 5032 #define IDS_SHOW_PROTECTED 5033 #define IDS_SHOW_PROTECTED_TIP 5034 #define IDS_SHOW_ICON 5035 #define IDS_SHOW_ICON_TIP 5036 // Next default values for new objects // #ifdef APSTUDIO_INVOKED #ifndef APSTUDIO_READONLY_SYMBOLS #define _APS_NEXT_RESOURCE_VALUE 202 #define _APS_NEXT_COMMAND_VALUE 32768 #define _APS_NEXT_CONTROL_VALUE 201 #define _APS_NEXT_SYMED_VALUE 103 #endif #endif
1,342
2,421
<reponame>OpenHFT/Chronicle-Map package net.openhft.chronicle.map.fromdocs.acid.genesis; import net.openhft.chronicle.map.ChronicleMap; import net.openhft.chronicle.map.fromdocs.BondVOInterface; import net.openhft.chronicle.map.fromdocs.acid.ChronicleAcidIsolation; import java.util.Scanner; import java.util.concurrent.locks.StampedLock; import static net.openhft.chronicle.values.Values.newNativeReference; public class DirtyReadVictim implements Runnable { private int isoLevel = ChronicleAcidIsolation.LOWEST_LATENCY; private ChronicleMap chm; private StampedLock offHeapLock; DirtyReadVictim(String isoL) { if (isoL.equals("DIRTY_READ_INTOLERANT")) this.isoLevel = ChronicleAcidIsolation.DIRTY_READ_INTOLERANT; else if (isoL.equals("DIRTY_READ_OPTIMISTIC")) this.isoLevel = ChronicleAcidIsolation.DIRTY_READ_OPTIMISTIC; } @Override public void run() { Scanner sc = new Scanner(System.in); try { /** * <EMAIL> START */ Double coupon = 0.00; BondVOInterface bond = newNativeReference(BondVOInterface.class); long stamp = 0; System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim CALLING offHeapLock.tryOptimisticRead()" ); while ((stamp = this.offHeapLock.tryOptimisticRead()) == 0) { ; // none } System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim CALLED offHeapLock.tryOptimisticRead()" ); try { chm.acquireUsing("369604101", bond); System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim calling chm.get('369604101').getCoupon()" ); bond = (BondVOInterface) chm.get("369604101"); coupon = bond.getCoupon(); System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim coupon=[" + coupon + "] read." ); System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim sleeping 10 seconds" ); Thread.sleep(10_000); } finally { if (this.offHeapLock.validate(stamp)) { System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim OPTIMISTICALLY_READ coupon=" + coupon + " " ); } else { System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim FAILED offHeapLock.validate(stamp) " + " must apply PESSIMISTIC_POLICY (dirty read endured)" + " coupon=[" + coupon + "] is *DIRTY*. " ); } } /** * <EMAIL> END */ System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim got() coupon=" + coupon + " " ); System.out.println( " ,,@t=" + System.currentTimeMillis() + " DirtyReadVictim COMMITTED" ); } catch (Exception throwables) { throwables.printStackTrace(); } } public ChronicleMap getCraig() { return this.chm; } public void setCraig(ChronicleMap craig) { this.chm = craig; } public void setStampedLock(StampedLock _sLock) { this.offHeapLock = _sLock; } }
2,234
653
<reponame>mkinsner/llvm //==------ sycl_fe_intrins.hpp --- SYCL Device Compiler's FE intrinsics ----==// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // C++ intrinsics recognized by the SYCL device compiler frontend //===----------------------------------------------------------------------===// #pragma once #include <cstddef> #include <cstdint> #ifdef __SYCL_DEVICE_ONLY__ // Get the value of the specialization constant with given name. // Post-link tool traces the ID to a string literal it points to and assigns // integer ID. template <typename T> SYCL_EXTERNAL T __sycl_getScalarSpecConstantValue(const char *ID); template <typename T> SYCL_EXTERNAL T __sycl_getCompositeSpecConstantValue(const char *ID); // The intrinsics below are used to implement support SYCL2020 specialization // constants. SYCL2020 version requires more parameters compared to the initial // version. // Get the value of the specialization constant with given symbolic ID. // `SymbolicID` is a unique string ID of a specialization constant. // `DefaultValue` contains a pointer to a global variable with the initializer, // which should be used as the default value of the specialization constants. // `RTBuffer` is a pointer to a runtime buffer, which holds values of all // specialization constant and should be used if native specialization constants // are not available. template <typename T> SYCL_EXTERNAL T __sycl_getScalar2020SpecConstantValue(const char *SymbolicID, const void *DefaultValue, const void *RTBuffer); template <typename T> SYCL_EXTERNAL T __sycl_getComposite2020SpecConstantValue( const char *SymbolicID, const void *DefaultValue, const void *RTBuffer); // Request a fixed-size allocation in local address space at kernel scope. extern "C" SYCL_EXTERNAL __attribute__((opencl_local)) std::uint8_t * __sycl_allocateLocalMemory(std::size_t Size, std::size_t Alignment); #endif
707
908
import rq import sys from rq import use_connection, Queue from redis import Redis redis_con = Redis('redis', 6379) redis_q = Queue(connection=redis_con) started = rq.registry.StartedJobRegistry('default', connection=redis_con) failed = rq.registry.FailedJobRegistry('default', connection=redis_con) comp = rq.registry.FinishedJobRegistry('default', connection=redis_con) comp_list = comp.get_job_ids() cur_list = started.get_job_ids() job_id = sys.argv[1] #job_id = '31d91dbde1c24e60a2c0e439a4ec43c3' #job_id = '742714c1e70c4ba3a833dde4472ebbbc' job = redis_q.fetch_job(job_id) #comp.cleanup() #comp.remove(job) print(dir(job)) print(job.ttl) job.set_status('finished') job.save() comp.add(job, -1) job.cleanup(-1) comp_list = comp.get_job_ids() print(comp_list) Queue.dequeue_any(redis_q, None, connection=redis_con) #job = redis_q.fetch_job(job_id) #print(job)
497
1,350
<filename>sdk/cognitiveservices/ms-azure-cs-luis-authoring/src/main/java/com/microsoft/azure/cognitiveservices/language/luis/authoring/models/DeleteAppsOptionalParameter.java<gh_stars>1000+ /** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.cognitiveservices.language.luis.authoring.models; /** * The DeleteAppsOptionalParameter model. */ public class DeleteAppsOptionalParameter { /** * A flag to indicate whether to force an operation. */ private Boolean force; /** * Gets or sets the preferred language for the response. */ private String thisclientacceptLanguage; /** * Get the force value. * * @return the force value */ public Boolean force() { return this.force; } /** * Set the force value. * * @param force the force value to set * @return the DeleteAppsOptionalParameter object itself. */ public DeleteAppsOptionalParameter withForce(Boolean force) { this.force = force; return this; } /** * Get the thisclientacceptLanguage value. * * @return the thisclientacceptLanguage value */ public String thisclientacceptLanguage() { return this.thisclientacceptLanguage; } /** * Set the thisclientacceptLanguage value. * * @param thisclientacceptLanguage the thisclientacceptLanguage value to set * @return the DeleteAppsOptionalParameter object itself. */ public DeleteAppsOptionalParameter withThisclientacceptLanguage(String thisclientacceptLanguage) { this.thisclientacceptLanguage = thisclientacceptLanguage; return this; } }
621
458
// Copyright 2015-2021 Swim Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.json; import java.math.BigInteger; import swim.codec.Diagnostic; import swim.codec.Input; import swim.codec.Output; import swim.codec.Parser; import swim.codec.Unicode; import swim.util.Builder; /** * Factory for constructing JSON parsers and parse trees. */ public abstract class JsonParser<I, V> { public abstract I item(V value); public abstract V value(I item); public abstract I field(V key, V value); public abstract Builder<I, V> documentBuilder(); public abstract Builder<I, V> objectBuilder(); public abstract Builder<I, V> arrayBuilder(); public abstract Output<V> textOutput(); public abstract V ident(V value); public abstract V num(int value); public abstract V num(long value); public abstract V num(float value); public abstract V num(double value); public abstract V num(BigInteger value); public abstract V num(String value); public abstract V uint32(int value); public abstract V uint64(long value); public abstract V bool(boolean value); public Parser<V> parseValue(Input input) { return ValueParser.parse(input, this); } public Parser<V> parseObject(Input input) { return ObjectParser.parse(input, this); } public Parser<V> parseArray(Input input) { return ArrayParser.parse(input, this); } public Parser<V> parseIdent(Input input) { return IdentParser.parse(input, this); } public Parser<V> parseString(Input input) { return StringParser.parse(input, this); } public Parser<V> parseNumber(Input input) { return NumberParser.parse(input, this); } public Parser<V> valueParser() { return new ValueParser<I, V>(this); } public Parser<V> objectParser() { return new ObjectParser<I, V>(this); } public Parser<V> arrayParser() { return new ArrayParser<I, V>(this); } public Parser<V> documentParser() { return new DocumentParser<I, V>(this); } public V parseValueString(String string) { Input input = Unicode.stringInput(string); while (input.isCont() && Json.isWhitespace(input.head())) { input = input.step(); } Parser<V> parser = this.parseValue(input); if (parser.isDone()) { while (input.isCont() && Json.isWhitespace(input.head())) { input = input.step(); } } if (input.isCont() && !parser.isError()) { parser = Parser.error(Diagnostic.unexpected(input)); } else if (input.isError()) { parser = Parser.error(input.trap()); } return parser.bind(); } public V parseObjectString(String string) { Input input = Unicode.stringInput(string); while (input.isCont() && Json.isWhitespace(input.head())) { input = input.step(); } Parser<V> parser = this.parseObject(input); if (parser.isDone()) { while (input.isCont() && Json.isWhitespace(input.head())) { input = input.step(); } } if (input.isCont() && !parser.isError()) { parser = Parser.error(Diagnostic.unexpected(input)); } else if (input.isError()) { parser = Parser.error(input.trap()); } return parser.bind(); } }
1,242
2,144
<filename>game_shared/bot/nav_node.cpp // nav_node.cpp // AI Navigation Nodes // Author: <NAME> (<EMAIL>), January 2003 #include "extdll.h" #include "util.h" #include "cbase.h" #include "bot_util.h" #include "nav_node.h" NavDirType Opposite[ NUM_DIRECTIONS ] = { SOUTH, WEST, NORTH, EAST }; CNavNode *CNavNode::m_list = NULL; unsigned int CNavNode::m_listLength = 0; Extent NodeMapExtent; //-------------------------------------------------------------------------------------------------------------- /** * Constructor */ CNavNode::CNavNode( const Vector *pos, const Vector *normal, CNavNode *parent ) { m_pos = *pos; m_normal = *normal; static unsigned int nextID = 1; m_id = nextID++; for( int i=0; i<NUM_DIRECTIONS; i++ ) m_to[ i ] = NULL; m_visited = 0; m_parent = parent; m_next = m_list; m_list = this; m_listLength++; m_isCovered = false; m_area = NULL; m_attributeFlags = 0; //CONSOLE_ECHO( " Created node #%d ( %g, %g, %g )\n", m_id, pos->x, pos->y, pos->z ); } //-------------------------------------------------------------------------------------------------------------- /** * Create a connection FROM this node TO the given node, in the given direction */ void CNavNode::ConnectTo( CNavNode *node, NavDirType dir ) { m_to[ dir ] = node; } //-------------------------------------------------------------------------------------------------------------- /** * Return node at given position. * @todo Need a hash table to make this lookup fast */ const CNavNode *CNavNode::GetNode( const Vector *pos ) { const float tolerance = 0.45f * GenerationStepSize; // 1.0f for( const CNavNode *node = m_list; node; node = node->m_next ) { float dx = ABS( node->m_pos.x - pos->x ); float dy = ABS( node->m_pos.y - pos->y ); float dz = ABS( node->m_pos.z - pos->z ); if (dx < tolerance && dy < tolerance && dz < tolerance) return node; } return NULL; } //-------------------------------------------------------------------------------------------------------------- /** * Return true if this node is bidirectionally linked to * another node in the given direction */ BOOL CNavNode::IsBiLinked( NavDirType dir ) const { if (m_to[ dir ] && m_to[ dir ]->m_to[ Opposite[dir] ] == this) return true; return false; } //-------------------------------------------------------------------------------------------------------------- /** * Return true if this node is the NW corner of a quad of nodes * that are all bidirectionally linked. */ BOOL CNavNode::IsClosedCell( void ) const { if (IsBiLinked( SOUTH ) && IsBiLinked( EAST ) && m_to[ EAST ]->IsBiLinked( SOUTH ) && m_to[ SOUTH ]->IsBiLinked( EAST ) && m_to[ EAST ]->m_to[ SOUTH ] == m_to[ SOUTH ]->m_to[ EAST ]) return true; return false; }
914
892
{ "schema_version": "1.2.0", "id": "GHSA-xf82-xc5r-2cm2", "modified": "2022-05-01T07:25:03Z", "published": "2022-05-01T07:25:03Z", "aliases": [ "CVE-2006-5118" ], "details": "PHP remote file inclusion vulnerability in index.php3 in the PDD package for PHPSelect Web Development Division allows remote attackers to execute arbitrary PHP code via a URL in the Application_Root parameter.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-5118" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/29223" }, { "type": "WEB", "url": "http://securityreason.com/securityalert/1666" }, { "type": "WEB", "url": "http://www.securityfocus.com/archive/1/447177/100/0/threaded" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/20231" } ], "database_specific": { "cwe_ids": [ ], "severity": "HIGH", "github_reviewed": false } }
485
1,128
<filename>opennlp-tools/src/test/java/opennlp/tools/lemmatizer/LemmaSampleTest.java<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package opennlp.tools.lemmatizer; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectInputStream; import java.io.ObjectOutput; import java.io.ObjectOutputStream; import java.io.StringReader; import org.junit.Assert; import org.junit.Test; public class LemmaSampleTest { @Test(expected = IllegalArgumentException.class) public void testParameterValidation() { new LemmaSample(new String[] { "" }, new String[] { "" }, new String[] { "test", "one element to much" }); } private static String[] createSentence() { return new String[] { "Forecasts", "for", "the", "trade", "figures", "range", "widely", "." }; } private static String[] createTags() { return new String[] { "NNS", "IN", "DT", "NN", "NNS", "VBP", "RB", "." }; } private static String[] createLemmas() { return new String[] { "Forecast", "for", "the", "trade", "figure", "range", "widely", "." 
}; } @Test public void testLemmaSampleSerDe() throws IOException { LemmaSample lemmaSample = createGoldSample(); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); ObjectOutput out = new ObjectOutputStream(byteArrayOutputStream); out.writeObject(lemmaSample); out.flush(); byte[] bytes = byteArrayOutputStream.toByteArray(); ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes); ObjectInput objectInput = new ObjectInputStream(byteArrayInputStream); LemmaSample deSerializedLemmaSample = null; try { deSerializedLemmaSample = (LemmaSample) objectInput.readObject(); } catch (ClassNotFoundException e) { // do nothing } Assert.assertNotNull(deSerializedLemmaSample); Assert.assertArrayEquals(lemmaSample.getLemmas(), deSerializedLemmaSample.getLemmas()); Assert.assertArrayEquals(lemmaSample.getTokens(), deSerializedLemmaSample.getTokens()); Assert.assertArrayEquals(lemmaSample.getTags(), deSerializedLemmaSample.getTags()); } @Test public void testRetrievingContent() { LemmaSample sample = new LemmaSample(createSentence(), createTags(), createLemmas()); Assert.assertArrayEquals(createSentence(), sample.getTokens()); Assert.assertArrayEquals(createTags(), sample.getTags()); Assert.assertArrayEquals(createLemmas(), sample.getLemmas()); } @Test public void testToString() throws IOException { LemmaSample sample = new LemmaSample(createSentence(), createTags(), createLemmas()); String[] sentence = createSentence(); String[] tags = createTags(); String[] lemmas = createLemmas(); StringReader sr = new StringReader(sample.toString()); BufferedReader reader = new BufferedReader(sr); for (int i = 0; i < sentence.length; i++) { String line = reader.readLine(); String[] parts = line.split("\t"); Assert.assertEquals(3, parts.length); Assert.assertEquals(sentence[i], parts[0]); Assert.assertEquals(tags[i], parts[1]); Assert.assertEquals(lemmas[i], parts[2]); } } @Test public void testEquals() { Assert.assertFalse(createGoldSample() == createGoldSample()); Assert.assertTrue(createGoldSample().equals(createGoldSample())); Assert.assertFalse(createPredSample().equals(createGoldSample())); Assert.assertFalse(createPredSample().equals(new Object())); } public static LemmaSample createGoldSample() { return new LemmaSample(createSentence(), createTags(), createLemmas()); } public static LemmaSample createPredSample() { String[] lemmas = createLemmas(); lemmas[5] = "figure"; return new LemmaSample(createSentence(), createTags(), lemmas); } }
1,520
6,992
/* * Copyright 2002-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.oauth2.core.user; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Map; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.authority.SimpleGrantedAuthority; /** * @author <NAME> */ public final class TestOAuth2Users { private TestOAuth2Users() { } public static DefaultOAuth2User create() { String nameAttributeKey = "username"; Map<String, Object> attributes = new HashMap<>(); attributes.put(nameAttributeKey, "user"); Collection<GrantedAuthority> authorities = authorities(attributes); return new DefaultOAuth2User(authorities, attributes, nameAttributeKey); } private static Collection<GrantedAuthority> authorities(Map<String, Object> attributes) { return new LinkedHashSet<>(Arrays.asList(new OAuth2UserAuthority(attributes), new SimpleGrantedAuthority("SCOPE_read"), new SimpleGrantedAuthority("SCOPE_write"))); } }
476
464
<gh_stars>100-1000 package com.codezjx.andlinker; interface ITransfer extends android.os.IInterface { /** * Local-side IPC implementation stub class. */ abstract class Stub extends android.os.Binder implements ITransfer { private static final String DESCRIPTOR = "com.codezjx.alinker.ITransfer"; /** * Construct the stub at attach it to the interface. */ Stub() { this.attachInterface(this, DESCRIPTOR); } /** * Cast an IBinder object into an com.codezjx.alinker.ITransfer interface, * generating a proxy if needed. */ static ITransfer asInterface(android.os.IBinder obj) { if ((obj == null)) { return null; } android.os.IInterface iin = obj.queryLocalInterface(DESCRIPTOR); if (((iin != null) && (iin instanceof ITransfer))) { return ((ITransfer) iin); } return new Proxy(obj); } @Override public android.os.IBinder asBinder() { return this; } @Override public boolean onTransact(int code, android.os.Parcel data, android.os.Parcel reply, int flags) throws android.os.RemoteException { switch (code) { case INTERFACE_TRANSACTION: { reply.writeString(DESCRIPTOR); return true; } case TRANSACTION_execute: { data.enforceInterface(DESCRIPTOR); Request _arg0; if ((0 != data.readInt())) { _arg0 = Request.CREATOR.createFromParcel(data); } else { _arg0 = null; } Response _result = this.execute(_arg0); if ((flags & android.os.IBinder.FLAG_ONEWAY) != 0) { // One-way mode just execute and return directly. return true; } reply.writeNoException(); if ((_result != null)) { reply.writeInt(1); _result.writeToParcel(reply, android.os.Parcelable.PARCELABLE_WRITE_RETURN_VALUE); } else { reply.writeInt(0); } if ((_arg0 != null)) { reply.writeInt(1); _arg0.writeToParcel(reply, android.os.Parcelable.PARCELABLE_WRITE_RETURN_VALUE); } else { reply.writeInt(0); } return true; } case TRANSACTION_register: { data.enforceInterface(DESCRIPTOR); ICallback _arg0; _arg0 = ICallback.Stub.asInterface(data.readStrongBinder()); this.register(_arg0); reply.writeNoException(); return true; } case TRANSACTION_unRegister: { data.enforceInterface(DESCRIPTOR); ICallback _arg0; _arg0 = ICallback.Stub.asInterface(data.readStrongBinder()); this.unRegister(_arg0); reply.writeNoException(); return true; } } return super.onTransact(code, data, reply, flags); } private static class Proxy implements ITransfer { private android.os.IBinder mRemote; Proxy(android.os.IBinder remote) { mRemote = remote; } @Override public android.os.IBinder asBinder() { return mRemote; } public String getInterfaceDescriptor() { return DESCRIPTOR; } @Override public Response execute(Request request) throws android.os.RemoteException { android.os.Parcel _data = android.os.Parcel.obtain(); android.os.Parcel _reply = android.os.Parcel.obtain(); Response _result; try { _data.writeInterfaceToken(DESCRIPTOR); if ((request != null)) { _data.writeInt(1); request.writeToParcel(_data, 0); } else { _data.writeInt(0); } // One-way mode just transact and return directly. 
if (request != null && request.isOneWay()) { mRemote.transact(Stub.TRANSACTION_execute, _data, null, android.os.IBinder.FLAG_ONEWAY); return null; } mRemote.transact(Stub.TRANSACTION_execute, _data, _reply, 0); _reply.readException(); if ((0 != _reply.readInt())) { _result = Response.CREATOR.createFromParcel(_reply); } else { _result = null; } if ((0 != _reply.readInt())) { request.readFromParcel(_reply); } } finally { _reply.recycle(); _data.recycle(); } return _result; } @Override public void register(ICallback callback) throws android.os.RemoteException { android.os.Parcel _data = android.os.Parcel.obtain(); android.os.Parcel _reply = android.os.Parcel.obtain(); try { _data.writeInterfaceToken(DESCRIPTOR); _data.writeStrongBinder((((callback != null)) ? (callback.asBinder()) : (null))); mRemote.transact(Stub.TRANSACTION_register, _data, _reply, 0); _reply.readException(); } finally { _reply.recycle(); _data.recycle(); } } @Override public void unRegister(ICallback callback) throws android.os.RemoteException { android.os.Parcel _data = android.os.Parcel.obtain(); android.os.Parcel _reply = android.os.Parcel.obtain(); try { _data.writeInterfaceToken(DESCRIPTOR); _data.writeStrongBinder((((callback != null)) ? (callback.asBinder()) : (null))); mRemote.transact(Stub.TRANSACTION_unRegister, _data, _reply, 0); _reply.readException(); } finally { _reply.recycle(); _data.recycle(); } } } static final int TRANSACTION_execute = (android.os.IBinder.FIRST_CALL_TRANSACTION + 0); static final int TRANSACTION_register = (android.os.IBinder.FIRST_CALL_TRANSACTION + 1); static final int TRANSACTION_unRegister = (android.os.IBinder.FIRST_CALL_TRANSACTION + 2); } Response execute(Request request) throws android.os.RemoteException; void register(ICallback callback) throws android.os.RemoteException; void unRegister(ICallback callback) throws android.os.RemoteException; }
4,083
7,892
/********************************************************************** Audacity: A Digital Audio Editor AVIOContextWrapperImpl.inl <NAME> **********************************************************************/ class AVIOContextWrapperImpl : public AVIOContextWrapper { public: explicit AVIOContextWrapperImpl(const FFmpegFunctions& ffmpeg) : AVIOContextWrapper(ffmpeg) {} ~AVIOContextWrapperImpl() { if (mAVIOContext != nullptr) mFFmpeg.av_free(mAVIOContext->buffer); } unsigned char* GetBuffer() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->buffer; return {}; } int GetBufferSize() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->buffer_size; return {}; } unsigned char* GetBufPtr() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->buf_ptr; return {}; } unsigned char* GetBufEnd() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->buf_end; return {}; } void* GetOpaque() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->opaque; return {}; } void SetOpaque(void* opaque) noexcept override { if (mAVIOContext != nullptr) mAVIOContext->opaque = opaque; } int64_t GetPos() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->pos; return {}; } int GetEofReached() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->eof_reached; return {}; } int GetWriteFlag() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->write_flag; return {}; } void SetWriteFlag(int write_flag) noexcept override { if (mAVIOContext != nullptr) mAVIOContext->write_flag = write_flag; } int GetError() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->error; return {}; } void SetError(int error) noexcept override { if (mAVIOContext != nullptr) mAVIOContext->error = error; } int GetSeekable() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->seekable; return {}; } void SetSeekable(int seekable) noexcept override { if (mAVIOContext != nullptr) mAVIOContext->seekable = seekable; } int GetDirect() const noexcept override { if (mAVIOContext != nullptr) return mAVIOContext->direct; return {}; } void SetDirect(int direct) noexcept override { if (mAVIOContext != nullptr) mAVIOContext->direct = direct; } }; std::unique_ptr<AVIOContextWrapper> CreateAVIOContextWrapper(const FFmpegFunctions& ffmpeg) { return std::make_unique<AVIOContextWrapperImpl>(ffmpeg); }
1,213
5,169
{ "name": "RARotaryWheel", "version": "0.1.0", "summary": "RARotaryWheel is a subclass of UIView to draw wheel/circle.", "description": "RARotaryWheel is a subclass of UIView, which let you able to draw wheel (circle) with dynamic number of segments Edit", "homepage": "https://github.com/rallahaseh/RARotaryWheel", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "rashed": "<EMAIL>" }, "source": { "git": "https://github.com/rallahaseh/RARotaryWheel.git", "tag": "0.1.0" }, "social_media_url": "https://twitter.com/rallahaseh", "platforms": { "ios": "8.0" }, "source_files": "RARotaryWheel/Classes/**/*" }
276
5,169
<filename>Specs/Mng-perf/5.1.2/Mng-perf.podspec.json
{
  "name": "Mng-perf",
  "version": "5.1.2",
  "summary": "Mng-perf provides functionalities for monetizing your mobile application",
  "description": " Mng-perf provides functionalities for monetizing your mobile application\n \n",
  "homepage": "https://bitbucket.org/mngcorp/mngperf-demo-ios",
  "license": "Commercial",
  "authors": {
    "MOBILENETWORKGROUP": "http://www.mobilenetworkgroup.com"
  },
  "source": {
    "git": "https://bitbucket.org/mngcorp/mngperf-demo-ios.git",
    "tag": "v5.1.2"
  },
  "platforms": {
    "ios": "4.3"
  },
  "requires_arc": true,
  "source_files": "Mng-perf/**/*.h",
  "preserve_paths": "Mng-perf/*.a",
  "resources": "Mng-perf/*.bundle",
  "libraries": "Mng-perf",
  "xcconfig": {
    "LIBRARY_SEARCH_PATHS": "\"$(PODS_ROOT)/Mng-perf/Mng-perf\""
  },
  "frameworks": [
    "CoreGraphics",
    "SystemConfiguration",
    "MediaPlayer",
    "EventKitUI",
    "EventKit",
    "AdSupport",
    "StoreKit",
    "CoreLocation",
    "CoreTelephony",
    "MessageUI"
  ]
}
518
61,676
from unittest import mock

from django.core.management import call_command
from django.core.management.base import CommandError
from django.db import connection
from django.test import SimpleTestCase


class DbshellCommandTestCase(SimpleTestCase):

    def test_command_missing(self):
        msg = (
            'You appear not to have the %r program installed or on your path.'
            % connection.client.executable_name
        )
        with self.assertRaisesMessage(CommandError, msg):
            with mock.patch('subprocess.run', side_effect=FileNotFoundError):
                call_command('dbshell')
223
3,459
<reponame>werminghoff/Provenance
#ifndef FILTER_NTSC_H_
#define FILTER_NTSC_H_

#include <stdio.h>

#include "atari_ntsc/atari_ntsc.h"

/* Limits for the adjustable values. */
#define FILTER_NTSC_SHARPNESS_MIN -1.0
#define FILTER_NTSC_SHARPNESS_MAX 1.0
#define FILTER_NTSC_RESOLUTION_MIN -1.0
#define FILTER_NTSC_RESOLUTION_MAX 1.0
#define FILTER_NTSC_ARTIFACTS_MIN -1.0
#define FILTER_NTSC_ARTIFACTS_MAX 1.0
#define FILTER_NTSC_FRINGING_MIN -1.0
#define FILTER_NTSC_FRINGING_MAX 1.0
#define FILTER_NTSC_BLEED_MIN -1.0
#define FILTER_NTSC_BLEED_MAX 1.0
#define FILTER_NTSC_BURST_PHASE_MIN -1.0
#define FILTER_NTSC_BURST_PHASE_MAX 1.0

/* Contains controls used to adjust the palette in the NTSC filter. */
extern atari_ntsc_setup_t FILTER_NTSC_setup;

/* Pointer to the NTSC filter structure. Initialise it by setting it to value
   returned by FILTER_NTSC_New(). */
extern atari_ntsc_t *FILTER_NTSC_emu;

/* Allocates memory for a new NTSC filter. */
atari_ntsc_t *FILTER_NTSC_New(void);
/* Frees memory used by an NTSC filter, FILTER. */
void FILTER_NTSC_Delete(atari_ntsc_t *filter);

/* Reinitialises an NTSC filter, FILTER. Should be called after changing
   palette setup or loading/unloading an external palette. */
void FILTER_NTSC_Update(atari_ntsc_t *filter);

/* Restores default values for NTSC-filter-specific colour controls.
   FILTER_NTSC_Update should be called afterwards to apply changes. */
void FILTER_NTSC_RestoreDefaults(void);

/* Set/get one of the available preset adjustments: Composite, S-Video, RGB,
   Monochrome. */
enum {
	FILTER_NTSC_PRESET_COMPOSITE,
	FILTER_NTSC_PRESET_SVIDEO,
	FILTER_NTSC_PRESET_RGB,
	FILTER_NTSC_PRESET_MONOCHROME,
	FILTER_NTSC_PRESET_CUSTOM,
	/* Number of "normal" (not including CUSTOM) values in enumerator */
	FILTER_NTSC_PRESET_SIZE = FILTER_NTSC_PRESET_CUSTOM
};
/* FILTER_NTSC_Update should be called afterwards these functions to apply changes. */
void FILTER_NTSC_SetPreset(int preset);
int FILTER_NTSC_GetPreset(void);
void FILTER_NTSC_NextPreset(void);

/* Initialise variables before loading from config file. */
void FILTER_NTSC_PreInitialise(void);

/* Read/write to configuration file. */
int FILTER_NTSC_ReadConfig(char *option, char *ptr);
void FILTER_NTSC_WriteConfig(FILE *fp);

/* NTSC filter initialisation and processing of command-line arguments. */
int FILTER_NTSC_Initialise(int *argc, char *argv[]);

#endif /* FILTER_NTSC_H_ */
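A minimal lifecycle sketch for the interface declared above, using only the functions and globals from filter_ntsc.h; the step that actually pushes emulator frames through the filter is omitted, and the sharpness field name is inferred from the SHARPNESS limits rather than read from atari_ntsc_setup_t itself.
/* Hedged sketch: create the filter, pick a preset, apply the settings,
 * and free it again. */
#include "filter_ntsc.h"

void example_ntsc_setup(void)
{
	atari_ntsc_t *filter = FILTER_NTSC_New();   /* allocate the filter */
	FILTER_NTSC_emu = filter;                   /* publish it, as the header instructs */

	FILTER_NTSC_SetPreset(FILTER_NTSC_PRESET_COMPOSITE);
	FILTER_NTSC_setup.sharpness = 0.2;          /* field name assumed from the SHARPNESS limits above */
	FILTER_NTSC_Update(filter);                 /* apply preset and adjustments */

	/* ... render frames through the filter here ... */

	FILTER_NTSC_Delete(filter);
	FILTER_NTSC_emu = NULL;
}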
927
626
<filename>src/main/python/rerank/scripts/interpolate.py '''scripts to aggregate the retrieval scores and the reranking scores by linear interpolation This scipt requires five input files: 1. Four run files following the TREC format (query-id Q0 document-id rank score STANDARD) of both the baseline run and the rerank run on both the validation set and the test set 2. One qrel file following the TREC format (query-id 0 document-id relevance) that contains relevance info on the whole dataset (including dev and test set) This script will print out the tuned scores on the test set (Map, P30, Mrr, P20, NDCG20). You can modify the trec_eval arguments to try other evaluation metrics. The hyper-parameter \lamda is tuned on the dev set ''' import numpy as np import shlex import subprocess import sys import pprint import argparse def get_docsim(fn, docno2sim={}): p = open(fn) docno_list = [] sim_list = [] count = 0 for l in p: ls = l[:-1].split() if len(ls) == 7: qid, iternum, docno, rank, sim, run_id, label = ls else: qid, iternum, docno, rank, sim, run_id = ls if docno+"_"+qid in docno2sim: # print("docno {} already in docno2sim".format(docno)) count += 1 else: docno2sim[docno+"_"+qid] = float(sim) sim_list.append(float(sim)) docno_list.append(docno+"_"+qid) maxSim = max(sim_list) minSim = min(sim_list) # print("count: {}".format(count)) for docnoqid in docno_list: docno2sim[docnoqid] = (docno2sim[docnoqid] - minSim) / (maxSim - minSim) return docno2sim def get_map_inter(docno2sim, docno2sim_ql, l, model, fn_qrels, debug=False, mode="train"): docno2sim_inter = {} for docnoqid in docno2sim: if docnoqid in docno2sim_ql: docno2sim_inter[docnoqid] = docno2sim[docnoqid] * l + docno2sim_ql[docnoqid] * (1 -l) if mode == "train": fn_inter = "predict.inter.{}.{}.l{:.2f}".format(mode, model, l) else: fn_inter = "predict.inter.{}.{}".format(mode, model) f_inter = open(fn_inter, "w") if debug: print("total pairs: {}, written to file: {}".format(len(docno2sim_inter), fn_inter)) for docnoqid in docno2sim_inter: temp = docnoqid.split("_") docno = temp[0] qid = temp[1] score = docno2sim_inter[docno+"_"+qid] f_inter.write('{} 0 {} 0 {} {}\n'.format(qid, docno, score, model)) f_inter.close() cmd = "./eval/trec_eval.9.0.4/trec_eval {} {} -m ndcg_cut.20 -m map -m recip_rank -m P.20,30".format(fn_qrels, fn_inter) pargs = shlex.split(cmd) p = subprocess.Popen(pargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE) pout, perr = p.communicate() if debug: print("running {}".format(cmd)) if len(pout) != 0: print(pout.decode('utf-8')) else: print(perr.decode('utf-8')) if sys.version_info[0] < 3: lines = pout.split('\n') else: lines = pout.split(b'\n') Map = float(lines[0].strip().split()[-1]) Mrr = float(lines[1].strip().split()[-1]) P20 = float(lines[2].strip().split()[-1]) P30 = float(lines[3].strip().split()[-1]) NDCG20 = float(lines[4].strip().split()[-1]) return Map, P30, Mrr, P20, NDCG20 def get_inter_tune(l, fn_baseline_dev, fn_rerank_dev, model, fn_qrels, debug=False): docno2sim = {} docno2sim_ql = {} docno2sim_ql = get_docsim(fn_baseline_dev, docno2sim_ql) docno2sim = get_docsim(fn_rerank_dev, docno2sim) return get_map_inter(docno2sim, docno2sim_ql, l, model, fn_qrels, debug=debug, mode="dev") def tune_lambda(fn_baseline_train, fn_rerank_dev, model, fn_qrels, debug=False): maxL = 0 maxMap = 0 ll = [] for l in range(0, 100, 5): l = l / 100.0 Map, P30, Mrr, P20, NDCG20 = get_inter_tune(l, fn_baseline_train, fn_rerank_dev, model=model, debug=debug, fn_qrels=fn_qrels) ll.append(Map) if Map > maxMap: maxMap = Map maxL = l 
print("best lambda: {} with MAP: {}".format(maxL, maxMap)) return maxL, ll def get_inter_test(model, l, fn_baseline, fn_rerank, fn_qrels, debug=False): docno2sim = {} docno2sim_ql = {} docno2sim_ql = get_docsim(fn_baseline, docno2sim_ql) docno2sim = get_docsim(fn_rerank, docno2sim) return get_map_inter(docno2sim, docno2sim_ql, l, fn_qrels=fn_qrels, debug=debug, mode="test", model=model) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--fn_baseline', default='predict_BM25_0.9_0.5_RM3_47_9_0.3_robust04_split1_test.txt', help='file name of the baseline run on the test set') parser.add_argument('--fn_baseline_dev', default='predict_BM25_0.9_0.5_RM3_47_9_0.3_robust04_split1_dev.txt', help='file name of the baseline run on the dev set') parser.add_argument('--fn_rerank', default='src/main/python/rerank/MatchZoo/data/robust04/predict.test.drmm.txt', help='file name of the rerank run on the test set') parser.add_argument('--fn_rerank_dev', default='src/main/python/rerank/MatchZoo/data/robust04/predict.valid.drmm.txt', help='file name of the rerank run on the dev set') parser.add_argument('--fn_qrels', default="src/main/resources/topics-and-qrels/qrels.robust2004.txt", help='qrels file of Robust04') parser.add_argument('--model_rerank', default="drmm", help='rerank model name') parser.add_argument('--debug', action='store_true', help='qrels file of Robust04') args = parser.parse_args() maxL, ll = tune_lambda(args.fn_baseline_dev, args.fn_rerank_dev, model=args.model_rerank, fn_qrels=args.fn_qrels, debug=args.debug) testMap, P30, MRR, P20, NDCG20 = get_inter_test(model=args.model_rerank, l=maxL, fn_qrels=args.fn_qrels, fn_baseline=args.fn_baseline, fn_rerank=args.fn_rerank, debug=args.debug) print("Model: {}, Map={:.4f}, MRR={:.4f}, P30={:.4f}, P20={:.4f}, NDCG20={:.4f}, with lambda = {}" .format(args.model_rerank, testMap, MRR, P30, P20, NDCG20, maxL))
2,699
348
{"nom":"Saint-Vallier","circ":"4ème circonscription","dpt":"Drôme","inscrits":2122,"abs":1291,"votants":831,"blancs":90,"nuls":39,"exp":702,"res":[{"nuance":"LR","nom":"Mme <NAME>","voix":359},{"nuance":"REM","nom":"<NAME>","voix":343}]}
96
479
<reponame>cfogelklou/Monocypher // This file is dual-licensed. Choose whichever licence you want from // the two licences listed below. // // The first licence is a regular 2-clause BSD licence. The second licence // is the CC-0 from Creative Commons. It is intended to release Monocypher // to the public domain. The BSD licence serves as a fallback option. // // SPDX-License-Identifier: BSD-2-Clause OR CC0-1.0 // // ------------------------------------------------------------------------ // // Copyright (c) 2017-2019, <NAME> // All rights reserved. // // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the // distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // ------------------------------------------------------------------------ // // Written in 2017-2019 by <NAME> // // To the extent possible under law, the author(s) have dedicated all copyright // and related neighboring rights to this software to the public domain // worldwide. This software is distributed without any warranty. // // You should have received a copy of the CC0 Public Domain Dedication along // with this software. If not, see // <https://creativecommons.org/publicdomain/zero/1.0/> #include <sodium.h> #include "utils.h" static void test(size_t nb_blocks, size_t hash_size, size_t nb_iterations) { RANDOM_INPUT(password, 16 ); RANDOM_INPUT(salt , crypto_pwhash_SALTBYTES); u8 hash[256]; if (crypto_pwhash(hash, hash_size, (char*)password, 16, salt, nb_iterations, nb_blocks * 1024, crypto_pwhash_ALG_ARGON2I13)) { fprintf(stderr, "Argon2i failed. " "nb_blocks = %lu, " "hash_size = %lu " "nb_iterations = %lu\n", nb_blocks, hash_size, nb_iterations); printf(":deadbeef:\n"); // prints a canary to fail subsequent tests } print_number(nb_blocks ); print_number(nb_iterations ); print_vector(password, 16 ); print_vector(salt , crypto_pwhash_SALTBYTES); printf(":\n:\n"); // no key, no additionnal data print_vector(hash , hash_size ); printf("\n"); } int main(void) { SODIUM_INIT; FOR (nb_blocks , 508, 516) { test(nb_blocks, 32 , 3 ); } FOR (hash_size , 63, 65) { test(8 , hash_size, 3 ); } FOR (nb_iterations, 3, 6) { test(8 , 32 , nb_iterations); } return 0; }
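The vector-generation loop above drives libsodium's crypto_pwhash with explicit block and iteration counts; for routine key derivation the library's interactive limits are the usual starting point. A hedged sketch follows, with derive_key as a hypothetical helper and the salt expected to come from randombytes_buf.
#include <sodium.h>

int derive_key(unsigned char key[32],
               const char *password, unsigned long long password_len,
               const unsigned char salt[crypto_pwhash_SALTBYTES])
{
    if (sodium_init() < 0) {
        return -1;                      /* library could not be initialised */
    }
    /* Argon2i, 13th revision -- the same algorithm the test exercises. */
    return crypto_pwhash(key, 32,
                         password, password_len,
                         salt,
                         crypto_pwhash_OPSLIMIT_INTERACTIVE,
                         crypto_pwhash_MEMLIMIT_INTERACTIVE,
                         crypto_pwhash_ALG_ARGON2I13);
}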
1,394
606
<filename>base/src/main/java/org/arend/core/context/param/DependentLink.java package org.arend.core.context.param; import org.arend.core.context.LinkList; import org.arend.core.context.binding.Binding; import org.arend.core.expr.Expression; import org.arend.core.expr.ReferenceExpression; import org.arend.core.expr.UniverseExpression; import org.arend.core.expr.type.Type; import org.arend.core.subst.ExprSubstitution; import org.arend.ext.core.level.LevelSubstitution; import org.arend.core.subst.SubstVisitor; import org.arend.ext.core.context.CoreBinding; import org.arend.ext.core.context.CoreParameter; import org.arend.ext.core.expr.AbstractedExpression; import org.arend.extImpl.AbstractedDependentLinkType; import org.arend.typechecking.result.TypecheckingResult; import org.jetbrains.annotations.NotNull; import java.util.ArrayList; import java.util.List; import java.util.Map; public interface DependentLink extends Binding, CoreParameter { void setExplicit(boolean isExplicit); void setType(Type type); @NotNull @Override DependentLink getNext(); void setNext(DependentLink next); void setName(String name); DependentLink subst(SubstVisitor substVisitor, int size, boolean updateSubst); TypedDependentLink getNextTyped(List<String> names); @Override default Binding subst(SubstVisitor visitor) { return visitor.isEmpty() ? this : Helper.subst(this, visitor); } @NotNull @Override default TypecheckingResult getTypedType() { Type type = getType(); return new TypecheckingResult(type.getExpr(), new UniverseExpression(type.getSortOfType())); } @NotNull @Override default CoreBinding getBinding() { return this; } @NotNull @Override default Expression getTypeExpr() { return getType().getExpr(); } @Override default @NotNull AbstractedExpression abstractType(int size) { if (size >= Helper.size(this)) { throw new IllegalArgumentException(); } return AbstractedDependentLinkType.make(this, size); } @Override default @NotNull CoreParameter insertParameters(@NotNull Map<CoreParameter, CoreParameter> map) { if (map.isEmpty()) return this; LinkList list = new LinkList(); ExprSubstitution substitution = new ExprSubstitution(); SubstVisitor visitor = new SubstVisitor(substitution, LevelSubstitution.EMPTY); for (DependentLink param = this; param.hasNext(); param = param.getNext()) { DependentLink newParam = param.subst(visitor, 1, false); list.append(newParam); substitution.add(param, new ReferenceExpression(newParam)); CoreParameter param1 = map.get(param); if (param1 != null) { if (!(param1 instanceof DependentLink)) { throw new IllegalArgumentException(); } DependentLink param2 = Helper.subst((DependentLink) param1, visitor); list.append(param2); substitution.add((DependentLink) param1, new ReferenceExpression(param2)); } } return list.getFirst(); } class Helper { public static void freeSubsts(DependentLink link, ExprSubstitution substitution) { for (; link.hasNext(); link = link.getNext()) { substitution.remove(link); } } public static ExprSubstitution toSubstitution(DependentLink link, List<? extends Expression> expressions) { ExprSubstitution result = new ExprSubstitution(); for (Expression expression : expressions) { result.add(link, expression); link = link.getNext(); } return result; } public static ExprSubstitution toSubstitution(List<DependentLink> links, List<? 
extends Expression> expressions) { ExprSubstitution result = new ExprSubstitution(); for (int i = 0; i < Math.min(links.size(), expressions.size()); ++i) { result.add(links.get(i), expressions.get(i)); } return result; } public static int size(DependentLink link) { int result = 0; for (; link.hasNext(); link = link.getNext()) { result++; } return result; } public static DependentLink get(DependentLink link, int index) { for (int i = 0; i < index; i++) { if (!link.hasNext()) { return EmptyDependentLink.getInstance(); } link = link.getNext(); } return link; } public static DependentLink getLast(DependentLink link) { DependentLink last = link; for (; link.hasNext(); link = link.getNext()) { last = link; } return last; } public static List<DependentLink> toList(DependentLink link) { List<DependentLink> result = new ArrayList<>(); for (; link.hasNext(); link = link.getNext()) { result.add(link); } return result; } public static DependentLink subst(DependentLink link, ExprSubstitution exprSubst, LevelSubstitution levelSubst) { return link.subst(new SubstVisitor(exprSubst, levelSubst), Integer.MAX_VALUE, false); } public static DependentLink subst(DependentLink link, SubstVisitor substVisitor) { return link.subst(substVisitor, Integer.MAX_VALUE, false); } public static DependentLink subst(DependentLink link, ExprSubstitution substitution, boolean updateSubst) { return link.subst(new SubstVisitor(substitution, LevelSubstitution.EMPTY), Integer.MAX_VALUE, updateSubst); } public static DependentLink subst(DependentLink link, ExprSubstitution substitution) { return subst(link, substitution, LevelSubstitution.EMPTY); } public static DependentLink copy(DependentLink link) { return subst(link, new ExprSubstitution(), LevelSubstitution.EMPTY); } public static SingleDependentLink subst(SingleDependentLink link, ExprSubstitution substitution) { return subst(link, new SubstVisitor(substitution, LevelSubstitution.EMPTY)); } public static List<DependentLink> subst(List<DependentLink> links, ExprSubstitution exprSubst, LevelSubstitution levelSubst) { List<DependentLink> newLinks = new ArrayList<>(); int i = 0; while (i < links.size()) { DependentLink substLink = DependentLink.Helper.subst(links.get(i), exprSubst, levelSubst); while (substLink.hasNext()) { newLinks.add(substLink); substLink = substLink.getNext(); ++i; } } return newLinks; } public static SingleDependentLink subst(SingleDependentLink link, SubstVisitor substVisitor) { return link.subst(substVisitor, Integer.MAX_VALUE, false); } public static DependentLink take(DependentLink link, int size) { return link.subst(new SubstVisitor(new ExprSubstitution(), LevelSubstitution.EMPTY), size, false); } public static SingleDependentLink take(SingleDependentLink link, int size) { return link.subst(new SubstVisitor(new ExprSubstitution(), LevelSubstitution.EMPTY), size, false); } } static String toString(DependentLink binding) { return (binding.getName() == null ? "_" : binding.getName()) + " : " + binding.getTypeExpr(); } }
2,539
997
#ifndef NTT_H
#define NTT_H

#include <stdint.h>
#include "params.h"

#define zetas KYBER_NAMESPACE(zetas)
extern const int16_t zetas[128];

#define ntt KYBER_NAMESPACE(ntt)
void ntt(int16_t poly[256]);

#define invntt KYBER_NAMESPACE(invntt)
void invntt(int16_t poly[256]);

#define basemul KYBER_NAMESPACE(basemul)
void basemul(int16_t r[2], const int16_t a[2], const int16_t b[2], int16_t zeta);

#endif
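A hedged sketch of how these declarations are typically combined: forward-transform two degree-255 polynomials, multiply them pair-of-coefficients at a time with basemul, then invert. poly_pointwise is a hypothetical helper name, the zeta indexing follows the Kyber reference code's basemul loop, and the Montgomery-domain scaling and reductions handled elsewhere in the library are omitted, so the result is not a bit-exact product without them.
#include <stdint.h>
#include "ntt.h"

/* Pointwise multiplication in the NTT domain, two coefficients at a time,
 * as the basemul signature above suggests. */
static void poly_pointwise(int16_t r[256], int16_t a[256], int16_t b[256])
{
    ntt(a);
    ntt(b);
    for (int i = 0; i < 64; i++) {
        basemul(&r[4 * i],     &a[4 * i],     &b[4 * i],      zetas[64 + i]);
        basemul(&r[4 * i + 2], &a[4 * i + 2], &b[4 * i + 2], -zetas[64 + i]);
    }
    invntt(r);
}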
192
777
/* * Artificial Intelligence for Humans * Volume 2: Nature Inspired Algorithms * Java Version * http://www.aifh.org * http://www.jeffheaton.com * * Code repository: * https://github.com/jeffheaton/aifh * * Copyright 2014 by <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For more information on Heaton Research copyrights, licenses * and trademarks visit: * http://www.heatonresearch.com/copyright */ package com.heatonresearch.aifh.aco; import com.heatonresearch.aifh.AIFH; import com.heatonresearch.aifh.learning.LearningMethod; import com.heatonresearch.aifh.learning.MLMethod; import com.heatonresearch.aifh.learning.score.ScoreFunction; import com.heatonresearch.aifh.randomize.GenerateRandom; import com.heatonresearch.aifh.randomize.MersenneTwisterGenerateRandom; import java.util.Arrays; /** * This class implements continuous ant colony optimization (CACO) * <p/> * References: * <p/> * Training Neural Networks with Ant Colony Optimization, * <NAME>, Spring, 2013 * <p/> * <NAME> and <NAME>. “An ant colony optimization algorithm for * continuous optimization: application to feed-forward neural network training”, in * Springer London (2007). * <p/> * M.Dorigo, V.Maniezzo, and A.Colorni. “Ant System: Optimization by a colony of * cooperating agents”, in IEEE Transactions on Systems, Man, and Cybernetics, * 1996. */ public class ContinuousACO implements LearningMethod { /** * Sigma constant. Minimum standard deviation. */ public static final double CONST_SIGMA = 0.1; /** * Q constant. Weighting exponent factor. */ public static final double CONST_Q = 0.08; /** * The population of ants. */ private final ContinuousAnt[] population; /** * The population size. */ private final int populationSize; /** * The parameter count. */ private int paramCount = 0; /** * The weighting of each ant. */ private final double[] weighting; /** * The sum of the weighting. */ private double sumWeighting = 0; /** * Epsilon, learning rate. */ private double epsilon = .75; /** * Random number generation. */ private GenerateRandom random; /** * The algorithm that we are fitting. */ private MLMethod algorithm; /** * The score function. */ private ScoreFunction score; /** * The constructor. * * @param theAlgorithm The algorithm to fit. * @param theScore The score function. * @param thePopulationSize The population size. 
*/ public ContinuousACO(final MLMethod theAlgorithm, final ScoreFunction theScore, final int thePopulationSize) { this.algorithm = theAlgorithm; this.populationSize = thePopulationSize; this.score = theScore; this.random = new MersenneTwisterGenerateRandom(); this.paramCount = theAlgorithm.getLongTermMemory().length; this.population = new ContinuousAnt[thePopulationSize * 2]; this.weighting = new double[thePopulationSize]; for (int i = 0; i < this.population.length; i++) { this.population[i] = new ContinuousAnt(paramCount, score.shouldMinimize()); for (int j = 0; j < paramCount; j++) { this.population[i].getParams()[j] = random.nextDouble(-1, 1); } } updateScore(); Arrays.sort(this.population); computeWeighting(); sampleSolutions(); Arrays.sort(this.population); } /** * Update the score. */ private void updateScore() { for (final ContinuousAnt aPopulation : this.population) { System.arraycopy(aPopulation.getParams(), 0, this.algorithm.getLongTermMemory(), 0, this.paramCount); aPopulation.setScore(this.score.calculateScore(this.algorithm)); } } /** * Compute the weighting for each ant. */ private void computeWeighting() { sumWeighting = 0; double coef = (1 / (0.1 * Math.sqrt(2 * Math.PI))); for (int i = 0; i < this.populationSize; i++) { double exponent = (i * i) / (2 * CONST_Q * CONST_Q * this.populationSize * this.populationSize); this.weighting[i] = coef * Math.exp(-exponent); sumWeighting += weighting[i]; } } /** * Compute the standard deviation. * * @param x The parameter to compute for. * @param l The population member. * @return The standard deviation. */ private double computeSD(int x, int l) { double sum = 0.0; for (int i = 0; i < this.populationSize; i++) { sum += Math.abs(this.population[i].getParams()[x] - this.population[l].getParams()[x]) / (this.populationSize - 1); } if (sum < AIFH.DEFAULT_PRECISION) { return CONST_SIGMA; } return (epsilon * sum); } /** * Select a probability distribution function (PDF). * * @return The PDF index. */ private int selectPDF() { int l = 0; double temp = 0; double r = random.nextDouble(); for (int i = 0; i < this.populationSize; i++) { temp += weighting[i] / sumWeighting; if (r < temp) { l = i; break; } } return l; } /** * Sample new parameters. */ private void sampleSolutions() { for (int i = this.populationSize; i < this.population.length; i++) { int pdf = selectPDF(); for (int j = 0; j < paramCount; j++) { double sigma = computeSD(j, pdf); double mu = this.population[pdf].getParams()[j]; double d = (random.nextGaussian() * sigma) + mu; this.population[i].getParams()[j] = d; } } } /** * @return The value for epsilon, the learning rate. */ public double getEpsilon() { return epsilon; } /** * Set epsilon, the learning rate. * * @param epsilon The epsilon value. */ public void setEpsilon(final double epsilon) { this.epsilon = epsilon; } /** * @return Random number generator. 
*/ public GenerateRandom getRandom() { return random; } public void setRandom(final GenerateRandom random) { this.random = random; } /** * {@inheritDoc} */ @Override public void iteration() { computeWeighting(); sampleSolutions(); updateScore(); Arrays.sort(this.population); } /** * {@inheritDoc} */ @Override public double getLastError() { return this.population[0].getScore(); } /** * {@inheritDoc} */ @Override public boolean done() { return false; } /** * {@inheritDoc} */ @Override public String getStatus() { return ""; } /** * {@inheritDoc} */ @Override public void finishTraining() { System.arraycopy(this.population[0].getParams(), 0, this.algorithm.getLongTermMemory(), 0, this.algorithm.getLongTermMemory().length); } }
3,137
2,151
/* * Copyright (C) 2006, 2008 Apple Inc. All rights reserved. * Copyright (C) 2009 Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_LOADER_FETCH_RESOURCE_RESPONSE_H_ #define THIRD_PARTY_BLINK_RENDERER_PLATFORM_LOADER_FETCH_RESOURCE_RESPONSE_H_ #include <memory> #include <utility> #include "base/memory/scoped_refptr.h" #include "base/time/time.h" #include "services/network/public/mojom/fetch_api.mojom-blink.h" #include "third_party/blink/public/platform/web_url_response.h" #include "third_party/blink/renderer/platform/blob/blob_data.h" #include "third_party/blink/renderer/platform/loader/fetch/resource_load_info.h" #include "third_party/blink/renderer/platform/loader/fetch/resource_load_timing.h" #include "third_party/blink/renderer/platform/network/http_header_map.h" #include "third_party/blink/renderer/platform/network/http_parsers.h" #include "third_party/blink/renderer/platform/platform_export.h" #include "third_party/blink/renderer/platform/weborigin/kurl.h" #include "third_party/blink/renderer/platform/wtf/noncopyable.h" #include "third_party/blink/renderer/platform/wtf/ref_counted.h" #include "third_party/blink/renderer/platform/wtf/text/cstring.h" #include "third_party/blink/renderer/platform/wtf/time.h" #include "third_party/blink/renderer/platform/wtf/vector.h" namespace blink { struct CrossThreadResourceResponseData; // A ResourceResponse is a "response" object used in blink. Conceptually // it is https://fetch.spec.whatwg.org/#concept-response, but it contains // a lot of blink specific fields. WebURLResponse is the "public version" // of this class and public classes (i.e., classes in public/platform) use it. // // There are cases where we need to copy a response across threads, and // CrossThreadResourceResponseData is a struct for the purpose. When you add a // member variable to this class, do not forget to add the corresponding // one in CrossThreadResourceResponseData and write copying logic. 
class PLATFORM_EXPORT ResourceResponse final { DISALLOW_NEW_EXCEPT_PLACEMENT_NEW(); public: enum HTTPVersion : uint8_t { kHTTPVersionUnknown, kHTTPVersion_0_9, kHTTPVersion_1_0, kHTTPVersion_1_1, kHTTPVersion_2_0 }; enum SecurityStyle : uint8_t { kSecurityStyleUnknown, kSecurityStyleUnauthenticated, kSecurityStyleAuthenticationBroken, kSecurityStyleAuthenticated }; enum CTPolicyCompliance { kCTPolicyComplianceDetailsNotAvailable, kCTPolicyComplies, kCTPolicyDoesNotComply }; class PLATFORM_EXPORT SignedCertificateTimestamp final { public: SignedCertificateTimestamp(String status, String origin, String log_description, String log_id, int64_t timestamp, String hash_algorithm, String signature_algorithm, String signature_data) : status_(status), origin_(origin), log_description_(log_description), log_id_(log_id), timestamp_(timestamp), hash_algorithm_(hash_algorithm), signature_algorithm_(signature_algorithm), signature_data_(signature_data) {} explicit SignedCertificateTimestamp( const struct blink::WebURLResponse::SignedCertificateTimestamp&); SignedCertificateTimestamp IsolatedCopy() const; String status_; String origin_; String log_description_; String log_id_; int64_t timestamp_; String hash_algorithm_; String signature_algorithm_; String signature_data_; }; using SignedCertificateTimestampList = WTF::Vector<SignedCertificateTimestamp>; struct SecurityDetails { DISALLOW_NEW(); SecurityDetails() : valid_from(0), valid_to(0) {} // All strings are human-readable values. String protocol; // keyExchange is the empty string if not applicable for the connection's // protocol. String key_exchange; // keyExchangeGroup is the empty string if not applicable for the // connection's key exchange. String key_exchange_group; String cipher; // mac is the empty string when the connection cipher suite does not // have a separate MAC value (i.e. if the cipher suite is AEAD). String mac; String subject_name; Vector<String> san_list; String issuer; time_t valid_from; time_t valid_to; // DER-encoded X509Certificate certificate chain. Vector<AtomicString> certificate; SignedCertificateTimestampList sct_list; }; class ExtraData : public RefCounted<ExtraData> { public: virtual ~ExtraData() = default; }; explicit ResourceResponse(CrossThreadResourceResponseData*); // Gets a copy of the data suitable for passing to another thread. std::unique_ptr<CrossThreadResourceResponseData> CopyData() const; ResourceResponse(); explicit ResourceResponse( const KURL&, const AtomicString& mime_type = g_null_atom, long long expected_length = 0, const AtomicString& text_encoding_name = g_null_atom); ResourceResponse(const ResourceResponse&); ResourceResponse& operator=(const ResourceResponse&); bool IsNull() const { return is_null_; } bool IsHTTP() const; // The URL of the resource. Note that if a service worker responded to the // request for this resource, it may have fetched an entirely different URL // and responded with that resource. wasFetchedViaServiceWorker() and // originalURLViaServiceWorker() can be used to determine whether and how a // service worker responded to the request. Example service worker code: // // onfetch = (event => { // if (event.request.url == 'https://abc.com') // event.respondWith(fetch('https://def.com')); // }); // // If this service worker responds to an "https://abc.com" request, then for // the resulting ResourceResponse, url() is "https://abc.com", // wasFetchedViaServiceWorker() is true, and originalURLViaServiceWorker() is // "https://def.com". 
const KURL& Url() const; void SetURL(const KURL&); const AtomicString& MimeType() const; void SetMimeType(const AtomicString&); long long ExpectedContentLength() const; void SetExpectedContentLength(long long); const AtomicString& TextEncodingName() const; void SetTextEncodingName(const AtomicString&); int HttpStatusCode() const; void SetHTTPStatusCode(int); const AtomicString& HttpStatusText() const; void SetHTTPStatusText(const AtomicString&); const AtomicString& HttpHeaderField(const AtomicString& name) const; void SetHTTPHeaderField(const AtomicString& name, const AtomicString& value); void AddHTTPHeaderField(const AtomicString& name, const AtomicString& value); void ClearHTTPHeaderField(const AtomicString& name); const HTTPHeaderMap& HttpHeaderFields() const; bool IsMultipart() const { return MimeType() == "multipart/x-mixed-replace"; } bool IsAttachment() const; AtomicString HttpContentType() const; // These functions return parsed values of the corresponding response headers. // NaN means that the header was not present or had invalid value. bool CacheControlContainsNoCache() const; bool CacheControlContainsNoStore() const; bool CacheControlContainsMustRevalidate() const; bool HasCacheValidatorFields() const; double CacheControlMaxAge() const; double Date() const; double Age() const; double Expires() const; double LastModified() const; unsigned ConnectionID() const; void SetConnectionID(unsigned); bool ConnectionReused() const; void SetConnectionReused(bool); bool WasCached() const; void SetWasCached(bool); ResourceLoadTiming* GetResourceLoadTiming() const; void SetResourceLoadTiming(scoped_refptr<ResourceLoadTiming>); scoped_refptr<ResourceLoadInfo> GetResourceLoadInfo() const; void SetResourceLoadInfo(scoped_refptr<ResourceLoadInfo>); HTTPVersion HttpVersion() const { return http_version_; } void SetHTTPVersion(HTTPVersion version) { http_version_ = version; } bool HasMajorCertificateErrors() const { return has_major_certificate_errors_; } void SetHasMajorCertificateErrors(bool has_major_certificate_errors) { has_major_certificate_errors_ = has_major_certificate_errors; } CTPolicyCompliance GetCTPolicyCompliance() const { return ct_policy_compliance_; } void SetCTPolicyCompliance(CTPolicyCompliance); bool IsLegacySymantecCert() const { return is_legacy_symantec_cert_; } void SetIsLegacySymantecCert(bool is_legacy_symantec_cert) { is_legacy_symantec_cert_ = is_legacy_symantec_cert; } SecurityStyle GetSecurityStyle() const { return security_style_; } void SetSecurityStyle(SecurityStyle security_style) { security_style_ = security_style; } const SecurityDetails* GetSecurityDetails() const { return &security_details_; } void SetSecurityDetails(const String& protocol, const String& key_exchange, const String& key_exchange_group, const String& cipher, const String& mac, const String& subject_name, const Vector<String>& san_list, const String& issuer, time_t valid_from, time_t valid_to, const Vector<AtomicString>& certificate, const SignedCertificateTimestampList& sct_list); long long AppCacheID() const { return app_cache_id_; } void SetAppCacheID(long long id) { app_cache_id_ = id; } const KURL& AppCacheManifestURL() const { return app_cache_manifest_url_; } void SetAppCacheManifestURL(const KURL& url) { app_cache_manifest_url_ = url; } bool WasFetchedViaSPDY() const { return was_fetched_via_spdy_; } void SetWasFetchedViaSPDY(bool value) { was_fetched_via_spdy_ = value; } // See ServiceWorkerResponseInfo::was_fetched_via_service_worker. 
bool WasFetchedViaServiceWorker() const { return was_fetched_via_service_worker_; } void SetWasFetchedViaServiceWorker(bool value) { was_fetched_via_service_worker_ = value; } // See ServiceWorkerResponseInfo::was_fallback_required. bool WasFallbackRequiredByServiceWorker() const { return was_fallback_required_by_service_worker_; } void SetWasFallbackRequiredByServiceWorker(bool value) { was_fallback_required_by_service_worker_ = value; } network::mojom::FetchResponseType ResponseTypeViaServiceWorker() const { return response_type_via_service_worker_; } void SetResponseTypeViaServiceWorker( network::mojom::FetchResponseType value) { response_type_via_service_worker_ = value; } bool IsOpaqueResponseFromServiceWorker() const; // See ServiceWorkerResponseInfo::url_list_via_service_worker. const Vector<KURL>& UrlListViaServiceWorker() const { return url_list_via_service_worker_; } void SetURLListViaServiceWorker(const Vector<KURL>& url_list) { url_list_via_service_worker_ = url_list; } // Returns the last URL of urlListViaServiceWorker if exists. Otherwise // returns an empty URL. KURL OriginalURLViaServiceWorker() const; const Vector<char>& MultipartBoundary() const { return multipart_boundary_; } void SetMultipartBoundary(const char* bytes, size_t size) { multipart_boundary_.clear(); multipart_boundary_.Append(bytes, size); } const String& CacheStorageCacheName() const { return cache_storage_cache_name_; } void SetCacheStorageCacheName(const String& cache_storage_cache_name) { cache_storage_cache_name_ = cache_storage_cache_name; } const Vector<String>& CorsExposedHeaderNames() const { return cors_exposed_header_names_; } void SetCorsExposedHeaderNames(const Vector<String>& header_names) { cors_exposed_header_names_ = header_names; } bool DidServiceWorkerNavigationPreload() const { return did_service_worker_navigation_preload_; } void SetDidServiceWorkerNavigationPreload(bool value) { did_service_worker_navigation_preload_ = value; } Time ResponseTime() const { return response_time_; } void SetResponseTime(Time response_time) { response_time_ = response_time; } const AtomicString& RemoteIPAddress() const { return remote_ip_address_; } void SetRemoteIPAddress(const AtomicString& value) { remote_ip_address_ = value; } unsigned short RemotePort() const { return remote_port_; } void SetRemotePort(unsigned short value) { remote_port_ = value; } const AtomicString& AlpnNegotiatedProtocol() const { return alpn_negotiated_protocol_; } void SetAlpnNegotiatedProtocol(const AtomicString& value) { alpn_negotiated_protocol_ = value; } net::HttpResponseInfo::ConnectionInfo ConnectionInfo() const { return connection_info_; } void SetConnectionInfo(net::HttpResponseInfo::ConnectionInfo value) { connection_info_ = value; } AtomicString ConnectionInfoString() const; long long EncodedDataLength() const { return encoded_data_length_; } void SetEncodedDataLength(long long value); long long EncodedBodyLength() const { return encoded_body_length_; } void SetEncodedBodyLength(long long value); long long DecodedBodyLength() const { return decoded_body_length_; } void SetDecodedBodyLength(long long value); const String& DownloadedFilePath() const { return downloaded_file_path_; } void SetDownloadedFilePath(const String&); // Extra data associated with this response. 
ExtraData* GetExtraData() const { return extra_data_.get(); } void SetExtraData(scoped_refptr<ExtraData> extra_data) { extra_data_ = std::move(extra_data); } unsigned MemoryUsage() const { // average size, mostly due to URL and Header Map strings return 1280; } // PlzNavigate: Even if there is redirections, only one // ResourceResponse is built: the final response. // The redirect response chain can be accessed by this function. const Vector<ResourceResponse>& RedirectResponses() const { return redirect_responses_; } void AppendRedirectResponse(const ResourceResponse&); // This method doesn't compare the all members. static bool Compare(const ResourceResponse&, const ResourceResponse&); private: void UpdateHeaderParsedState(const AtomicString& name); KURL url_; AtomicString mime_type_; long long expected_content_length_; AtomicString text_encoding_name_; unsigned connection_id_ = 0; int http_status_code_ = 0; AtomicString http_status_text_; HTTPHeaderMap http_header_fields_; // Remote IP address of the socket which fetched this resource. AtomicString remote_ip_address_; // Remote port number of the socket which fetched this resource. unsigned short remote_port_ = 0; bool was_cached_ = false; bool connection_reused_ = false; bool is_null_; mutable bool have_parsed_age_header_ = false; mutable bool have_parsed_date_header_ = false; mutable bool have_parsed_expires_header_ = false; mutable bool have_parsed_last_modified_header_ = false; // True if the resource was retrieved by the embedder in spite of // certificate errors. bool has_major_certificate_errors_ = false; // The Certificate Transparency policy compliance status of the resource. CTPolicyCompliance ct_policy_compliance_ = kCTPolicyComplianceDetailsNotAvailable; // True if the resource was retrieved with a legacy Symantec certificate which // is slated for distrust in future. bool is_legacy_symantec_cert_ = false; // The time at which the resource's certificate expires. Null if there was no // certificate. base::Time cert_validity_start_; // Was the resource fetched over SPDY. See http://dev.chromium.org/spdy bool was_fetched_via_spdy_ = false; // Was the resource fetched over an explicit proxy (HTTP, SOCKS, etc). bool was_fetched_via_proxy_ = false; // Was the resource fetched over a ServiceWorker. bool was_fetched_via_service_worker_ = false; // Was the fallback request with skip service worker flag required. bool was_fallback_required_by_service_worker_ = false; // True if service worker navigation preload was performed due to // the request for this resource. bool did_service_worker_navigation_preload_ = false; // The type of the response which was returned by the ServiceWorker. network::mojom::FetchResponseType response_type_via_service_worker_ = network::mojom::FetchResponseType::kDefault; // HTTP version used in the response, if known. HTTPVersion http_version_ = kHTTPVersionUnknown; // The security style of the resource. // This only contains a valid value when the DevTools Network domain is // enabled. (Otherwise, it contains a default value of Unknown.) SecurityStyle security_style_ = kSecurityStyleUnknown; // Security details of this request's connection. // If m_securityStyle is Unknown or Unauthenticated, this does not contain // valid data. 
SecurityDetails security_details_; scoped_refptr<ResourceLoadTiming> resource_load_timing_; scoped_refptr<ResourceLoadInfo> resource_load_info_; mutable CacheControlHeader cache_control_header_; mutable double age_ = 0.0; mutable double date_ = 0.0; mutable double expires_ = 0.0; mutable double last_modified_ = 0.0; // The id of the appcache this response was retrieved from, or zero if // the response was not retrieved from an appcache. long long app_cache_id_ = 0; // The manifest url of the appcache this response was retrieved from, if any. // Note: only valid for main resource responses. KURL app_cache_manifest_url_; // The multipart boundary of this response. Vector<char> multipart_boundary_; // The URL list of the response which was fetched by the ServiceWorker. // This is empty if the response was created inside the ServiceWorker. Vector<KURL> url_list_via_service_worker_; // The cache name of the CacheStorage from where the response is served via // the ServiceWorker. Null if the response isn't from the CacheStorage. String cache_storage_cache_name_; // The headers that should be exposed according to CORS. Only guaranteed // to be set if the response was fetched by a ServiceWorker. Vector<String> cors_exposed_header_names_; // The time at which the response headers were received. For cached // responses, this time could be "far" in the past. Time response_time_; // ALPN negotiated protocol of the socket which fetched this resource. AtomicString alpn_negotiated_protocol_; // Information about the type of connection used to fetch this resource. net::HttpResponseInfo::ConnectionInfo connection_info_ = net::HttpResponseInfo::ConnectionInfo::CONNECTION_INFO_UNKNOWN; // Size of the response in bytes prior to decompression. long long encoded_data_length_ = 0; // Size of the response body in bytes prior to decompression. long long encoded_body_length_ = 0; // Sizes of the response body in bytes after any content-encoding is // removed. long long decoded_body_length_ = 0; // The downloaded file path if the load streamed to a file. String downloaded_file_path_; // The handle to the downloaded file to ensure the underlying file will not // be deleted. scoped_refptr<BlobDataHandle> downloaded_file_handle_; // ExtraData associated with the response. scoped_refptr<ExtraData> extra_data_; // PlzNavigate: the redirect responses are transmitted // inside the final response. Vector<ResourceResponse> redirect_responses_; }; inline bool operator==(const ResourceResponse& a, const ResourceResponse& b) { return ResourceResponse::Compare(a, b); } inline bool operator!=(const ResourceResponse& a, const ResourceResponse& b) { return !(a == b); } // This class is needed to copy a ResourceResponse across threads, because it // has some members which cannot be transferred across threads (AtomicString // for example). // There are some rules / restrictions: // - This struct cannot contain an object that cannot be transferred across // threads (e.g., AtomicString) // - Non-simple members need explicit copying (e.g., String::IsolatedCopy, // KURL::Copy) rather than the copy constructor or the assignment operator. 
struct CrossThreadResourceResponseData { WTF_MAKE_NONCOPYABLE(CrossThreadResourceResponseData); USING_FAST_MALLOC(CrossThreadResourceResponseData); public: CrossThreadResourceResponseData() = default; KURL url_; String mime_type_; long long expected_content_length_; String text_encoding_name_; int http_status_code_; String http_status_text_; std::unique_ptr<CrossThreadHTTPHeaderMapData> http_headers_; scoped_refptr<ResourceLoadTiming> resource_load_timing_; bool has_major_certificate_errors_; ResourceResponse::CTPolicyCompliance ct_policy_compliance_; bool is_legacy_symantec_cert_; base::Time cert_validity_start_; ResourceResponse::SecurityStyle security_style_; ResourceResponse::SecurityDetails security_details_; // This is |certificate| from SecurityDetails since that structure should // use an AtomicString but this temporary structure is sent across threads. Vector<String> certificate_; ResourceResponse::HTTPVersion http_version_; long long app_cache_id_; KURL app_cache_manifest_url_; Vector<char> multipart_boundary_; bool was_fetched_via_spdy_; bool was_fetched_via_proxy_; bool was_fetched_via_service_worker_; bool was_fallback_required_by_service_worker_; network::mojom::FetchResponseType response_type_via_service_worker_; Vector<KURL> url_list_via_service_worker_; String cache_storage_cache_name_; bool did_service_worker_navigation_preload_; Time response_time_; String remote_ip_address_; unsigned short remote_port_; long long encoded_data_length_; long long encoded_body_length_; long long decoded_body_length_; String downloaded_file_path_; scoped_refptr<BlobDataHandle> downloaded_file_handle_; }; } // namespace blink #endif // THIRD_PARTY_BLINK_RENDERER_PLATFORM_LOADER_FETCH_RESOURCE_RESPONSE_H_
7,586
2,661
<filename>earth_enterprise/src/common/geUsers.cpp // Copyright 2017 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #include "geUsers.h" #include <cstdlib> #include <unistd.h> #include <pwd.h> #include <grp.h> #include <khException.h> #include <khGuard.h> geUserId::geUserId(uid_t uid, gid_t gid) : uid_(uid), gid_(gid) {} geUserId::geUserId(const std::string &username, const std::string &groupname) { GetUserIds(username, uid_, gid_); if (!groupname.empty()) { GetGroupId(groupname, gid_); } } void geUserId::SetEffectiveUid(const uid_t uid) { if (seteuid(uid) == -1) { throw khErrnoException(kh::tr("Could not change effective user to %1") .arg(uid)); } } void geUserId::SetEffectiveGid(const gid_t gid) { if (setegid(gid) == -1) { throw khErrnoException(kh::tr("Could not change effective group to %1") .arg(gid)); } } void geUserId::SetRealUid(const uid_t uid) { if (setuid(uid) == -1) { throw khErrnoException(kh::tr("Could not change real user to %1") .arg(uid)); } } void geUserId::SetRealGid(const gid_t gid) { if (setgid(gid) == -1) { throw khErrnoException(kh::tr("Could not change real group to %1") .arg(gid)); } } void geUserId::SwitchEffectiveToThis() const { // set gid first. setting uid will likely remove my ability to set gid SetEffectiveGid(gid_); SetEffectiveUid(uid_); } void geUserId::SwitchRealToThis() const { // set gid first. setting uid will likely remove my ability to set gid SetRealGid(gid_); SetRealUid(uid_); } void geUserId::RestoreEffectiveUserToRealUser(void) { // The following is sequence independent but depends on saved set-user-ID. SetRealUid(getuid()); SetRealGid(getgid()); } // GetUserIds attempts to get the user id and group id for the specified user. // It throws khErrnoException if the user is not found or encounters an error. // Warning: this is ugly // getpwnam_r works on goobuntu gdapper, getpwnam doesn't work, // goobuntu ghardy has the reverse issue void geUserId::GetUserIds(const std::string& username, uid_t& uid, gid_t& gid) { long bufsize = ::sysconf(_SC_GETPW_R_SIZE_MAX); char *buf = (char*)calloc(1, bufsize); khFreeGuard guard(buf); struct passwd pwinfo; struct passwd *pwinfo_ptr; do { errno = getpwnam_r(username.c_str(), &pwinfo, buf, bufsize, &pwinfo_ptr); if (errno != 0 && !pwinfo_ptr) { pwinfo_ptr = getpwnam(username.c_str()); } } while (!pwinfo_ptr && (errno == EINTR)); if (pwinfo_ptr) { uid = pwinfo.pw_uid; gid = pwinfo.pw_gid; return; } // Failed to get the user info. throw khErrnoException(kh::tr("Could not find user %1 (%d)") .arg(username.c_str(), errno)); } // GetGroupId attempts to get the group id for the specified group. // It throws khErrnoException if the group is not found or encounters an error. 
// Warning: this is ugly // getgrnam_r doesn't work on goobuntu ghardy, getgrnam does // goobuntu ghardy has the reverse issue void geUserId::GetGroupId(const std::string& groupname, gid_t& gid) { long bufsize = ::sysconf(_SC_GETGR_R_SIZE_MAX); char *buf = (char*)calloc(1, bufsize); khFreeGuard guard(buf); struct group grinfo; struct group *grinfo_ptr; do { errno = getgrnam_r(groupname.c_str(), &grinfo, buf, bufsize,&grinfo_ptr); if (errno != 0 && !grinfo_ptr) { grinfo_ptr = getgrnam(groupname.c_str()); } } while (!grinfo_ptr && (errno == EINTR)); if (grinfo_ptr) { gid = grinfo_ptr->gr_gid; return; } // Failed to get the group info. throw khException(kh::tr("Could not find group %1 (%2)") .arg(groupname.c_str(), errno)); }
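A hedged sketch of the switch-then-restore pattern these helpers support; the account and group names are placeholders, DoWorkAsServiceUser is hypothetical, and restoring assumes the saved set-user-ID still carries the original privileges.
#include "geUsers.h"

// Temporarily act as a less-privileged account, then switch back.
void DoWorkAsServiceUser()
{
  geUserId user("gefusionuser", "gegroup");   // placeholder names

  user.SwitchEffectiveToThis();               // sets gid first, then uid
  try {
    // ... perform work with the reduced privileges ...
  } catch (...) {
    user.RestoreEffectiveUserToRealUser();    // always switch back
    throw;
  }
  user.RestoreEffectiveUserToRealUser();
}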
1,753
377
/*******************************************************************************
 * * Copyright 2013 Impetus Infotech.
 * *
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * * http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 ******************************************************************************/
package com.impetus.client.cassandra.thrift;

import javax.persistence.Column;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.Table;

/**
 *
 * @author Kuldeep.Mishra
 *
 */
@Entity
@Table(name = "PHONE", schema = "CompositeCassandra@composite_pu")
public class Phone
{
    @EmbeddedId
    private PhoneId phoneIdentifier;

    @Column
    private Long phoneNumber;

    public PhoneId getPhoneId()
    {
        return phoneIdentifier;
    }

    public void setPhoneId(PhoneId phoneId)
    {
        this.phoneIdentifier = phoneId;
    }

    public Long getPhoneNumber()
    {
        return phoneNumber;
    }

    public void setPhoneNumber(Long phoneNumber)
    {
        this.phoneNumber = phoneNumber;
    }
}
494
542
<reponame>js20166098/sxfStandard /* * Copyright ©2018 vbill.cn. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * </p> */ package cn.vbill.middleware.porter.plugin.consumer.canal.consumer; import cn.vbill.middleware.porter.common.task.consumer.ConsumeClient; import cn.vbill.middleware.porter.common.task.exception.TaskStopTriggerException; import cn.vbill.middleware.porter.core.task.consumer.AbstractDataConsumer; import cn.vbill.middleware.porter.plugin.consumer.canal.CanalConsumerConst; import cn.vbill.middleware.porter.plugin.consumer.canal.client.CanalClient; import cn.vbill.middleware.porter.plugin.converter.canal.CanalRowConverter; import com.alibaba.otter.canal.protocol.CanalEntry; import com.alibaba.otter.canal.protocol.Message; import cn.vbill.middleware.porter.core.message.MessageEvent; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; /** * canal row消费端 * * @author: zhangkewei[<EMAIL>] * @date: 2018年03月06日 11:27 * @version: V2.0 * @review: zhangkewei[<EMAIL>]/2018年03月06日 11:27 */ @SuppressWarnings("unchecked") public class CanalConsumer extends AbstractDataConsumer { public List<MessageEvent> doFetch() throws TaskStopTriggerException, InterruptedException { return consumeClient.fetch(new ConsumeClient.FetchCallback<MessageEvent, Object>() { @Override public <F, O> List<F> acceptAll(O o) throws TaskStopTriggerException { List<MessageEvent> events = new ArrayList<>(); Message msg = (Message) o; List<CanalEntry.Entry> entries; if (msg.isRaw()) { entries = new ArrayList<>(); for (ByteString e : msg.getRawEntries()) { try { entries.add(CanalEntry.Entry.parseFrom(e)); } catch (InvalidProtocolBufferException ex) { throw new TaskStopTriggerException(ex); } } } else { entries = msg.getEntries(); } //批次消息同步提交点 CanalClient.CanalPosition bucketHeader = null; List<CanalEntry.Entry> endEntries = entries.stream() .filter(e -> e.getEntryType() == CanalEntry.EntryType.TRANSACTIONEND || e.getEntryType() == CanalEntry.EntryType.TRANSACTIONBEGIN) .collect(Collectors.toList()); if (!endEntries.isEmpty()) { CanalEntry.Entry lastEndEntry = endEntries.get(endEntries.size() - 1); bucketHeader = new CanalClient.CanalPosition(msg.getId(), lastEndEntry.getHeader().getLogfileOffset(), lastEndEntry.getHeader().getLogfileName()); } else { bucketHeader = new CanalClient.CanalPosition(msg.getId()); } for (CanalEntry.Entry entry : entries) { //事务消息同步点 CanalClient.CanalPosition rowHeader = new CanalClient.CanalPosition(msg.getId(), entry.getHeader().getLogfileOffset(), entry.getHeader().getLogfileName()); List<MessageEvent> convertedObj = getConverter().convertList(bucketHeader, rowHeader, entry); if (null != convertedObj && !convertedObj.isEmpty()) { events.addAll(convertedObj); } } return (List<F>) events; } }); } @Override protected String getPluginName() { return CanalConsumerConst.CONSUMER_PLUGIN_NAME.getCode(); } @Override public String getDefaultClientType() { return 
CanalConsumerConst.CONSUMER_SOURCE_TYPE_NAME.getCode(); } @Override public String getDefaultMetaClientType() { return CanalConsumerConst.CONSUMER_SOURCE_TYPE_NAME.getCode(); } @Override public String getDefaultEventConverter() { return CanalRowConverter.CONVERTER_NAME; } }
2081
410
<gh_stars>100-1000 // Copyright(c) 2017 POLYGONTEK // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http ://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "Precompiled.h" #include "BlueshiftEngine.h" #include "Profiler/Profiler.h" BE_NAMESPACE_BEGIN CmdArgs Engine::args; Str Engine::baseDir; Str Engine::searchPath; static streamOutFunc_t logFuncPtr = nullptr; static streamOutFunc_t errFuncPtr = nullptr; void Engine::InitBase(const char *baseDir, bool forceGenericSIMD, const streamOutFunc_t logFunc, const streamOutFunc_t errFunc) { // Set user-default ANSI code page obtained from the operating system setlocale(LC_ALL, ""); logFuncPtr = logFunc; errFuncPtr = errFunc; #if defined(__WIN32__) && defined(_DEBUG) //_CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF); //_CrtSetReportMode(_CRT_ERROR, _CRTDBG_MODE_DEBUG); // NOTE: can be replaced by setting '{,,ucrtbased}_crtBreakAlloc' (msvc2015) in debug watch window //_CrtSetBreakAlloc(123456); #endif ByteOrder::Init(); cmdSystem.Init(); cvarSystem.Init(); fileSystem.Init(baseDir); DetectCpu(); SIMD::Init(forceGenericSIMD); PlatformTime::Init(); Math::Init(); } void Engine::ShutdownBase() { PlatformTime::Shutdown(); SIMD::Shutdown(); fileSystem.Shutdown(); cvarSystem.Shutdown(); cmdSystem.Shutdown(); } static void RegisterEngineObjects() { Object::RegisterProperties(); Asset::RegisterProperties(); Resource::RegisterProperties(); FolderResource::RegisterProperties(); TextureResource::RegisterProperties(); Texture2DResource::RegisterProperties(); TextureCubeMapResource::RegisterProperties(); TextureSpriteResource::RegisterProperties(); ShaderResource::RegisterProperties(); MaterialResource::RegisterProperties(); FontResource::RegisterProperties(); SkeletonResource::RegisterProperties(); MeshResource::RegisterProperties(); ParticleSystemResource::RegisterProperties(); AnimResource::RegisterProperties(); FbxResource::RegisterProperties(); JointMaskResource::RegisterProperties(); AnimControllerResource::RegisterProperties(); PrefabResource::RegisterProperties(); SoundResource::RegisterProperties(); MapResource::RegisterProperties(); ScriptResource::RegisterProperties(); Component::RegisterProperties(); ComTransform::RegisterProperties(); ComRectTransform::RegisterProperties(); ComCollider::RegisterProperties(); ComBoxCollider::RegisterProperties(); ComSphereCollider::RegisterProperties(); ComCapsuleCollider::RegisterProperties(); ComConeCollider::RegisterProperties(); ComCylinderCollider::RegisterProperties(); ComMeshCollider::RegisterProperties(); ComRigidBody::RegisterProperties(); ComSensor::RegisterProperties(); ComVehicleWheel::RegisterProperties(); ComConstantForce::RegisterProperties(); ComJoint::RegisterProperties(); ComFixedJoint::RegisterProperties(); ComHingeJoint::RegisterProperties(); ComSocketJoint::RegisterProperties(); ComSliderJoint::RegisterProperties(); ComSpringJoint::RegisterProperties(); ComWheelJoint::RegisterProperties(); ComCharacterJoint::RegisterProperties(); ComCharacterController::RegisterProperties(); ComRenderable::RegisterProperties(); 
ComMeshRenderer::RegisterProperties(); ComStaticMeshRenderer::RegisterProperties(); ComSkinnedMeshRenderer::RegisterProperties(); ComAnimation::RegisterProperties(); ComAnimator::RegisterProperties(); ComParticleSystem::RegisterProperties(); ComTextRenderer::RegisterProperties(); ComLight::RegisterProperties(); ComEnvironmentProbe::RegisterProperties(); ComCamera::RegisterProperties(); ComCanvas::RegisterProperties(); ComImage::RegisterProperties(); ComText::RegisterProperties(); ComSpline::RegisterProperties(); ComScript::RegisterProperties(); ComAudioListener::RegisterProperties(); ComAudioSource::RegisterProperties(); Entity::RegisterProperties(); Prefab::RegisterProperties(); GameWorld::RegisterProperties(); MapRenderSettings::RegisterProperties(); TagLayerSettings::RegisterProperties(); PhysicsSettings::RegisterProperties(); PlayerSettings::RegisterProperties(); } void Engine::Init(const InitParms *initParms) { Engine::baseDir = initParms->baseDir; Engine::searchPath = initParms->searchPath; Engine::args = initParms->args; Platform::Init(); common.Init(Engine::baseDir); if (!Engine::searchPath.IsEmpty()) { fileSystem.SetSearchPath(Engine::searchPath); } RegisterEngineObjects(); } void Engine::Shutdown() { common.Shutdown(); Platform::Shutdown(); } void Engine::RunFrame(int elapsedMsec) { BE_PROFILE_SYNC_FRAME(); common.RunFrame(elapsedMsec); } void Log(int logLevel, const char *fmt, ...) { char buffer[16384]; va_list args; va_start(args, fmt); Str::vsnPrintf(buffer, COUNT_OF(buffer), fmt, args); va_end(args); (*logFuncPtr)(logLevel, buffer); } void Error(int errLevel, const char *fmt, ...) { char buffer[16384]; va_list args; va_start(args, fmt); Str::vsnPrintf(buffer, COUNT_OF(buffer), fmt, args); va_end(args); (*errFuncPtr)(errLevel, buffer); } void Assert(bool expr) { if (!expr) { BE_ERRLOG("Assert Failed\n"); assert(0); } } BE_NAMESPACE_END
2015
315
/*! * \file * \brief Struct tools::Reorderer. */ #ifndef REORDERER_HPP_ #define REORDERER_HPP_ #include <vector> namespace aff3ct { namespace tools { /*! * \class Reorderer * * \brief Reorders a list of frames (the reordering code is dynamic). * * \tparam T: the type of data to reorder. */ template <typename T> struct Reorderer { public: /*! * \brief Applies the reordering from a list of frames. * * \param in_data: a vector of frames (all the frames have to have the same size and the number of frames have * to be a power of 2). * \param out_data: the reordered frames (interleaved regularly | e0_f0| e0_f1 | e0_f2 | e0_f3 | e1_f0 |...). * \param data_length: the size of one frame. */ static void apply(const std::vector<const T*> &in_data, T* out_data, const int data_length); /*! * \brief Reverses the reordering. * * \param in_data: the reordered frames (interleaved regularly | e0_f0| e0_f1 | e0_f2 | e0_f3 | e1_f0 |...), * the number of frames have to be a power of 2 and have to have the same size. * \param out_data: a vector of frames. * \param data_length: the size of one frame. */ static void apply_rev(const T* in_data, std::vector<T*> &out_data, const int data_length); }; /*! * \class Reorderer_static * * \brief Reorders a list of frames (the reordering code is static). * * \tparam T: the type of data to reorder. * \tparam N_FRAMES: the number of frames to reorder (have to be a power of 2). */ template <typename T, int N_FRAMES> struct Reorderer_static { public: /*! * \brief Applies the reordering from a list of frames. * * \param in_data: a vector of frames (all the frames have to have the same size and the number of frames have * to be a power of 2). * \param out_data: the reordered frames (interleaved regularly | e0_f0| e0_f1 | e0_f2 | e0_f3 | e1_f0 |...). * \param data_length: the size of one frame. */ static void apply(const std::vector<const T*> &in_data, T* out_data, const int data_length); /*! * \brief Reverses the reordering. * * \param in_data: the reordered frames (interleaved regularly | e0_f0| e0_f1 | e0_f2 | e0_f3 | e1_f0 |...), * the number of frames have to be a power of 2 and have to have the same size. * \param out_data: a vector of frames. * \param data_length: the size of one frame. */ static void apply_rev(const T* in_data, std::vector<T*> &out_data, const int data_length); }; } } #ifndef DOXYGEN_SHOULD_SKIP_THIS #include "Tools/Perf/Reorderer/Reorderer.hxx" #endif #endif /* REORDERER_HPP_ */
1061
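The Doxygen comments in the Reorderer header above already spell out the interleaving scheme (| e0_f0 | e0_f1 | e0_f2 | e0_f3 | e1_f0 | ...), so a short sketch can make it concrete. This is an illustration only, written in Python rather than C++; the helper names reorder/reorder_rev are invented here, and it assumes what the comments require — frames of equal length whose count is a power of two.

# Illustration of the regular interleaving described in Reorderer.hpp above.
def reorder(frames):
    """Interleave element i of every frame before element i+1 of any frame."""
    n_frames = len(frames)
    data_length = len(frames[0])
    out = []
    for i in range(data_length):      # element index inside a frame
        for f in range(n_frames):     # frame index
            out.append(frames[f][i])
    return out

def reorder_rev(interleaved, n_frames):
    """Undo the interleaving, recovering the original list of frames."""
    data_length = len(interleaved) // n_frames
    return [[interleaved[i * n_frames + f] for i in range(data_length)]
            for f in range(n_frames)]

frames = [[10, 11, 12], [20, 21, 22], [30, 31, 32], [40, 41, 42]]
flat = reorder(frames)                # [10, 20, 30, 40, 11, 21, 31, 41, ...]
assert reorder_rev(flat, len(frames)) == frames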
867
<reponame>greedyuser/kur<filename>kur/containers/layers/repeat.py """ Copyright 2017 Deepgram Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from . import Layer, ParsingError ############################################################################### class Repeat(Layer): # pylint: disable=too-few-public-methods """ A layer which repeats its input a fixed number of times. """ ########################################################################### def __init__(self, *args, **kwargs): """ Creates a new repeat layer. """ super().__init__(*args, **kwargs) self.count = None ########################################################################### def _parse(self, engine): """ Parse the layer. """ if isinstance(self.args, dict): self.count = engine.evaluate(self.args['count'], recursive=True) else: self.count = self.args try: self.count = int(self.count) except ValueError: raise ParsingError('Key "count" in Repeat layer must be an ' 'integer. Received: {}'.format(self.count)) if self.count < 1: raise ParsingError('Key "count" in Repeat layer must be >= 1.') ########################################################################### def _build(self, model): """ Create the backend-specific placeholder. """ backend = model.get_backend() if backend.get_name() == 'keras': import keras.layers as L # pylint: disable=import-error yield L.RepeatVector( self.count, name=self.name ) elif backend.get_name() == 'pytorch': def connect(inputs): """ Connects the layer. """ assert len(inputs) == 1 ndim = len(inputs[0]['shape']) sizes = (1, self.count) + (1, )*ndim return { 'shape' : self.shape([inputs[0]['shape']]), 'layer' : model.data.add_operation( lambda x: x.unsqueeze(1).repeat(*sizes) )(inputs[0]['layer']) } yield connect else: raise ValueError( 'Unknown or unsupported backend: {}'.format(backend)) ########################################################################### def shape(self, input_shapes): """ Returns the output shape of this layer for a given input shape. """ if len(input_shapes) > 1: raise ValueError('Repeat layers only take a single input.') input_shape = input_shapes[0] if len(input_shape) != 1: raise ValueError('Repeat layers only accept flat (1D) inputs.') return (self.count, input_shape[0]) ### EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF
1008
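The Repeat layer above only accepts a flat (1D) input and stacks `count` copies of it, as its shape() method states (returning (count, input_shape[0])). A minimal NumPy sketch of that transformation, for illustration only — the helper name repeat_vector and the sample values are made up here, and this is not the Keras/PyTorch code path the layer actually builds:

import numpy as np

def repeat_vector(x, count):
    # Mirrors Repeat.shape(): a flat input of shape (d,) becomes (count, d).
    if x.ndim != 1:
        raise ValueError("Repeat layers only accept flat (1D) inputs.")
    if count < 1:
        raise ValueError("count must be >= 1")
    return np.tile(x, (count, 1))

x = np.array([1.0, 2.0, 3.0])
y = repeat_vector(x, count=4)
assert y.shape == (4, 3)      # matches shape() -> (count, input_shape[0])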
975
<gh_stars>100-1000 # Copyright (C) 2015 Nippon Telegraph and Telephone Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import functools import json import logging from nose.tools import eq_ import os import sys import unittest from ryu.lib import ofctl_v1_0 from ryu.lib import ofctl_v1_2 from ryu.lib import ofctl_v1_3 from ryu.lib import ofctl_v1_4 from ryu.lib import ofctl_v1_5 from ryu.ofproto import ofproto_parser from ryu.ofproto.ofproto_protocol import ProtocolDesc from ryu.tests import test_lib LOG = logging.getLogger(__name__) class DummyDatapath(ProtocolDesc): def __init__(self, version): super(DummyDatapath, self).__init__(version) self.id = 1 # XXX self.request_msg = None self.reply_msg = None self.waiters = None @staticmethod def set_xid(msg): msg.set_xid(0) return 0 def send_msg(self, msg): msg.serialize() self.request_msg = msg if self.reply_msg: lock, msgs = self.waiters[self.id][msg.xid] msgs.append(self.reply_msg) del self.waiters[self.id][msg.xid] lock.set() def set_reply(self, msg, waiters): self.reply_msg = msg self.waiters = waiters class Test_ofctl(unittest.TestCase): def _test(self, name, dp, method, args, request, reply, expected): print('processing %s ...' % name) waiters = {} dp.set_reply(reply, waiters) if reply: output = method(dp=dp, waiters=waiters, **args) else: output = method(dp=dp, **args) # expected message <--> sent message request.serialize() try: eq_(json.dumps(request.to_jsondict(), sort_keys=True), json.dumps(dp.request_msg.to_jsondict(), sort_keys=True)) except AssertionError as e: # For debugging json.dump(dp.request_msg.to_jsondict(), open('/tmp/' + name + '_request.json', 'w'), indent=3, sort_keys=True) raise e # expected output <--> return of ofctl def _remove(d, names): def f(x): return _remove(x, names) if isinstance(d, list): return list(map(f, d)) if isinstance(d, dict): d2 = {} for k, v in d.items(): if k in names: continue d2[k] = f(v) return d2 return d expected = _remove(expected, ['len', 'length']) output = _remove(output, ['len', 'length']) try: eq_(json.dumps(expected, sort_keys=True), json.dumps(output, sort_keys=True)) except AssertionError as e: # For debugging json.dump(output, open('/tmp/' + name + '_reply.json', 'w'), indent=4) raise e def _add_tests(): _ofp_vers = { 'of10': 0x01, 'of12': 0x03, 'of13': 0x04, 'of14': 0x05, 'of15': 0x06, } _test_cases = { 'of10': [ { 'method': ofctl_v1_0.mod_flow_entry, 'request': '1-2-ofp_flow_mod.packet.json', 'reply': None }, ], 'of12': [ { 'method': ofctl_v1_2.get_desc_stats, 'request': '3-24-ofp_desc_stats_request.packet.json', 'reply': '3-0-ofp_desc_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_queue_stats, 'request': '3-37-ofp_queue_stats_request.packet.json', 'reply': '3-38-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_queue_stats, 'request': 'lib-ofctl-ofp_queue_stats_request.packet1.json', 'reply': '3-38-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_queue_stats, 'request': 'lib-ofctl-ofp_queue_stats_request.packet2.json', 'reply': 
'3-38-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_queue_stats, 'request': 'lib-ofctl-ofp_queue_stats_request.packet3.json', 'reply': '3-38-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_queue_config, 'request': '3-35-ofp_queue_get_config_request.packet.json', 'reply': '3-36-ofp_queue_get_config_reply.packet.json' }, { 'method': ofctl_v1_2.get_queue_config, 'request': 'lib-ofctl-ofp_queue_get_config_request.packet.json', 'reply': '3-36-ofp_queue_get_config_reply.packet.json' }, { 'method': ofctl_v1_2.get_flow_stats, 'request': '3-11-ofp_flow_stats_request.packet.json', 'reply': '3-12-ofp_flow_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_aggregate_flow_stats, 'request': '3-25-ofp_aggregate_stats_request.packet.json', 'reply': '3-26-ofp_aggregate_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_table_stats, 'request': '3-27-ofp_table_stats_request.packet.json', 'reply': '3-28-ofp_table_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_port_stats, 'request': '3-29-ofp_port_stats_request.packet.json', 'reply': '3-30-ofp_port_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_port_stats, 'request': 'lib-ofctl-ofp_port_stats_request.packet.json', 'reply': '3-30-ofp_port_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_group_stats, 'request': '3-61-ofp_group_stats_request.packet.json', 'reply': '3-62-ofp_group_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_group_stats, 'request': 'lib-ofctl-ofp_group_stats_request.packet.json', 'reply': '3-62-ofp_group_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_group_features, 'request': '3-31-ofp_group_features_stats_request.packet.json', 'reply': '3-32-ofp_group_features_stats_reply.packet.json' }, { 'method': ofctl_v1_2.get_group_desc, 'request': '3-33-ofp_group_desc_stats_request.packet.json', 'reply': '3-34-ofp_group_desc_stats_reply.packet.json' }, # In OpenFlow 1.2, ofp_port_desc is not defined. # We use ofp_features_request to get ports description instead. 
{ 'method': ofctl_v1_2.get_port_desc, 'request': '3-5-ofp_features_request.packet.json', 'reply': '3-6-ofp_features_reply.packet.json' }, { 'method': ofctl_v1_2.mod_flow_entry, 'request': '3-2-ofp_flow_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_2.mod_group_entry, 'request': '3-21-ofp_group_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_2.mod_port_behavior, 'request': '3-22-ofp_port_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_2.send_experimenter, 'request': '3-16-ofp_experimenter.packet.json', 'reply': None }, ], 'of13': [ { 'method': ofctl_v1_3.get_desc_stats, 'request': '4-24-ofp_desc_request.packet.json', 'reply': '4-0-ofp_desc_reply.packet.json' }, { 'method': ofctl_v1_3.get_queue_stats, 'request': '4-37-ofp_queue_stats_request.packet.json', 'reply': '4-38-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_queue_stats, 'request': 'lib-ofctl-ofp_queue_stats_request.packet1.json', 'reply': '4-38-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_queue_stats, 'request': 'lib-ofctl-ofp_queue_stats_request.packet2.json', 'reply': '4-38-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_queue_stats, 'request': 'lib-ofctl-ofp_queue_stats_request.packet3.json', 'reply': '4-38-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_queue_config, 'request': '4-35-ofp_queue_get_config_request.packet.json', 'reply': '4-36-ofp_queue_get_config_reply.packet.json' }, { 'method': ofctl_v1_3.get_queue_config, 'request': 'lib-ofctl-ofp_queue_get_config_request.packet.json', 'reply': '4-36-ofp_queue_get_config_reply.packet.json' }, { 'method': ofctl_v1_3.get_flow_stats, 'request': '4-11-ofp_flow_stats_request.packet.json', 'reply': '4-12-ofp_flow_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_aggregate_flow_stats, 'request': '4-25-ofp_aggregate_stats_request.packet.json', 'reply': '4-26-ofp_aggregate_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_table_stats, 'request': '4-27-ofp_table_stats_request.packet.json', 'reply': '4-28-ofp_table_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_table_features, 'request': 'lib-ofctl-ofp_table_features_request.packet.json', 'reply': '4-56-ofp_table_features_reply.packet.json' }, { 'method': ofctl_v1_3.get_port_stats, 'request': '4-29-ofp_port_stats_request.packet.json', 'reply': '4-30-ofp_port_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_port_stats, 'request': 'lib-ofctl-ofp_port_stats_request.packet.json', 'reply': '4-30-ofp_port_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_meter_stats, 'request': '4-49-ofp_meter_stats_request.packet.json', 'reply': '4-50-ofp_meter_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_meter_stats, 'request': 'lib-ofctl-ofp_meter_stats_request.packet.json', 'reply': '4-50-ofp_meter_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_meter_features, 'request': '4-51-ofp_meter_features_request.packet.json', 'reply': '4-52-ofp_meter_features_reply.packet.json' }, { 'method': ofctl_v1_3.get_meter_config, 'request': '4-47-ofp_meter_config_request.packet.json', 'reply': '4-48-ofp_meter_config_reply.packet.json' }, { 'method': ofctl_v1_3.get_meter_config, 'request': 'lib-ofctl-ofp_meter_config_request.packet.json', 'reply': '4-48-ofp_meter_config_reply.packet.json' }, { 'method': ofctl_v1_3.get_group_stats, 'request': '4-57-ofp_group_stats_request.packet.json', 'reply': '4-58-ofp_group_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_group_stats, 'request': 'lib-ofctl-ofp_group_stats_request.packet.json', 
'reply': '4-58-ofp_group_stats_reply.packet.json' }, { 'method': ofctl_v1_3.get_group_features, 'request': '4-31-ofp_group_features_request.packet.json', 'reply': '4-32-ofp_group_features_reply.packet.json' }, { 'method': ofctl_v1_3.get_group_desc, 'request': '4-33-ofp_group_desc_request.packet.json', 'reply': '4-34-ofp_group_desc_reply.packet.json' }, { 'method': ofctl_v1_3.get_port_desc, 'request': '4-53-ofp_port_desc_request.packet.json', 'reply': '4-54-ofp_port_desc_reply.packet.json' }, { 'method': ofctl_v1_3.mod_flow_entry, 'request': '4-2-ofp_flow_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_3.mod_meter_entry, 'request': '4-45-ofp_meter_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_3.mod_group_entry, 'request': '4-21-ofp_group_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_3.mod_port_behavior, 'request': '4-22-ofp_port_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_3.send_experimenter, 'request': '4-16-ofp_experimenter.packet.json', 'reply': None }, ], 'of14': [ { 'method': ofctl_v1_4.get_desc_stats, 'request': '5-24-ofp_desc_request.packet.json', 'reply': '5-0-ofp_desc_reply.packet.json' }, { 'method': ofctl_v1_4.get_queue_stats, 'request': '5-35-ofp_queue_stats_request.packet.json', 'reply': '5-36-ofp_queue_stats_reply.packet.json' }, { 'method': ofctl_v1_4.get_queue_desc, 'request': '5-63-ofp_queue_desc_request.packet.json', 'reply': '5-64-ofp_queue_desc_reply.packet.json' }, { 'method': ofctl_v1_4.get_flow_stats, 'request': '5-11-ofp_flow_stats_request.packet.json', 'reply': '5-12-ofp_flow_stats_reply.packet.json' }, { 'method': ofctl_v1_4.get_aggregate_flow_stats, 'request': '5-25-ofp_aggregate_stats_request.packet.json', 'reply': '5-26-ofp_aggregate_stats_reply.packet.json' }, { 'method': ofctl_v1_4.get_table_stats, 'request': '5-27-ofp_table_stats_request.packet.json', 'reply': '5-28-ofp_table_stats_reply.packet.json' }, { 'method': ofctl_v1_4.get_table_features, 'request': 'lib-ofctl-ofp_table_features_request.packet.json', 'reply': '5-54-ofp_table_features_reply.packet.json' }, { 'method': ofctl_v1_4.get_port_stats, 'request': '5-29-ofp_port_stats_request.packet.json', 'reply': '5-30-ofp_port_stats_reply.packet.json' }, { 'method': ofctl_v1_4.get_meter_stats, 'request': '5-47-ofp_meter_stats_request.packet.json', 'reply': '5-48-ofp_meter_stats_reply.packet.json' }, { 'method': ofctl_v1_4.get_meter_features, 'request': '5-49-ofp_meter_features_request.packet.json', 'reply': '5-50-ofp_meter_features_reply.packet.json' }, { 'method': ofctl_v1_4.get_meter_config, 'request': '5-45-ofp_meter_config_request.packet.json', 'reply': '5-46-ofp_meter_config_reply.packet.json' }, { 'method': ofctl_v1_4.get_group_stats, 'request': '5-55-ofp_group_stats_request.packet.json', 'reply': '5-56-ofp_group_stats_reply.packet.json' }, { 'method': ofctl_v1_4.get_group_features, 'request': '5-31-ofp_group_features_request.packet.json', 'reply': '5-32-ofp_group_features_reply.packet.json' }, { 'method': ofctl_v1_4.get_group_desc, 'request': '5-33-ofp_group_desc_request.packet.json', 'reply': '5-34-ofp_group_desc_reply.packet.json' }, { 'method': ofctl_v1_4.get_port_desc, 'request': '5-51-ofp_port_desc_request.packet.json', 'reply': '5-52-ofp_port_desc_reply.packet.json' }, { 'method': ofctl_v1_4.mod_flow_entry, 'request': '5-2-ofp_flow_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_4.mod_meter_entry, 'request': '5-43-ofp_meter_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_4.mod_group_entry, 'request': '5-21-ofp_group_mod.packet.json', 
'reply': None }, { 'method': ofctl_v1_4.mod_port_behavior, 'request': '5-22-ofp_port_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_4.send_experimenter, 'request': '5-16-ofp_experimenter.packet.json', 'reply': None }, ], 'of15': [ { 'method': ofctl_v1_5.get_desc_stats, 'request': 'libofproto-OFP15-desc_request.packet.json', 'reply': 'libofproto-OFP15-desc_reply.packet.json' }, { 'method': ofctl_v1_5.get_queue_stats, 'request': 'lib-ofctl-ofp_queue_stats_request.packet.json', 'reply': 'libofproto-OFP15-queue_stats_reply.packet.json' }, { 'method': ofctl_v1_5.get_queue_desc, 'request': 'libofproto-OFP15-queue_desc_request.packet.json', 'reply': 'libofproto-OFP15-queue_desc_reply.packet.json' }, { 'method': ofctl_v1_5.get_flow_stats, 'request': 'libofproto-OFP15-flow_stats_request.packet.json', 'reply': 'libofproto-OFP15-flow_stats_reply.packet.json' }, { 'method': ofctl_v1_5.get_flow_desc_stats, 'request': 'libofproto-OFP15-flow_desc_request.packet.json', 'reply': 'libofproto-OFP15-flow_desc_reply.packet.json' }, { 'method': ofctl_v1_5.get_flow_desc_stats, 'request': 'lib-ofctl-OFP15-flow_desc_request.packet.json', 'reply': 'lib-ofctl-OFP15-flow_desc_reply.packet.json' }, { 'method': ofctl_v1_5.get_aggregate_flow_stats, 'request': 'libofproto-OFP15-aggregate_stats_request.packet.json', 'reply': 'libofproto-OFP15-aggregate_stats_reply.packet.json' }, { 'method': ofctl_v1_5.get_table_stats, 'request': 'libofproto-OFP15-table_stats_request.packet.json', 'reply': 'libofproto-OFP15-table_stats_reply.packet.json' }, { 'method': ofctl_v1_5.get_table_features, 'request': 'lib-ofctl-ofp_table_features_request.packet.json', 'reply': 'libofproto-OFP15-table_features_reply.packet.json' }, { 'method': ofctl_v1_5.get_port_stats, 'request': 'libofproto-OFP15-port_stats_request.packet.json', 'reply': 'libofproto-OFP15-port_stats_reply.packet.json' }, { 'method': ofctl_v1_5.get_meter_stats, 'request': 'libofproto-OFP15-meter_stats_request.packet.json', 'reply': 'libofproto-OFP15-meter_stats_reply.packet.json' }, { 'method': ofctl_v1_5.get_meter_features, 'request': 'libofproto-OFP15-meter_features_request.packet.json', 'reply': 'libofproto-OFP15-meter_features_reply.packet.json' }, { 'method': ofctl_v1_5.get_meter_desc, 'request': 'libofproto-OFP15-meter_desc_request.packet.json', 'reply': 'libofproto-OFP15-meter_desc_reply.packet.json' }, { 'method': ofctl_v1_5.get_group_stats, 'request': 'libofproto-OFP15-group_stats_request.packet.json', 'reply': 'libofproto-OFP15-group_stats_reply.packet.json' }, { 'method': ofctl_v1_5.get_group_features, 'request': 'libofproto-OFP15-group_features_request.packet.json', 'reply': 'libofproto-OFP15-group_features_reply.packet.json' }, { 'method': ofctl_v1_5.get_group_desc, 'request': 'libofproto-OFP15-group_desc_request.packet.json', 'reply': 'libofproto-OFP15-group_desc_reply.packet.json' }, { 'method': ofctl_v1_5.get_port_desc, 'request': 'libofproto-OFP15-port_desc_request.packet.json', 'reply': 'libofproto-OFP15-port_desc_reply.packet.json' }, { 'method': ofctl_v1_5.mod_flow_entry, 'request': 'libofproto-OFP15-flow_mod_no_nx.packet.json', 'reply': None }, { 'method': ofctl_v1_5.mod_flow_entry, 'request': 'lib-ofctl-OFP15-flow_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_5.mod_meter_entry, 'request': 'libofproto-OFP15-meter_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_5.mod_group_entry, 'request': 'libofproto-OFP15-group_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_5.mod_port_behavior, 'request': 
'libofproto-OFP15-port_mod.packet.json', 'reply': None }, { 'method': ofctl_v1_5.send_experimenter, 'request': 'libofproto-OFP15-experimenter.packet.json', 'reply': None } ], } def _jsonfile_to_msg(datapath, jsonfile): return ofproto_parser.ofp_msg_from_jsondict( datapath, json.load(open(jsonfile))) this_dir = os.path.dirname(sys.modules[__name__].__file__) parser_json_root = os.path.join(this_dir, '../ofproto/json/') ofctl_json_root = os.path.join(this_dir, 'ofctl_json/') for ofp_ver, tests in _test_cases.items(): dp = DummyDatapath(_ofp_vers[ofp_ver]) parser_json_dir = os.path.join(parser_json_root, ofp_ver) ofctl_json_dir = os.path.join(ofctl_json_root, ofp_ver) for test in tests: name = 'test_ofctl_' + ofp_ver + '_' + test['request'] print('adding %s ...' % name) args = {} args_json_path = os.path.join(ofctl_json_dir, test['request']) if os.path.exists(args_json_path): args = json.load(open(args_json_path)) request = _jsonfile_to_msg( dp, os.path.join(parser_json_dir, test['request'])) reply = None expected = None if test['reply']: reply = _jsonfile_to_msg( dp, os.path.join(parser_json_dir, test['reply'])) expected = json.load( open(os.path.join(ofctl_json_dir, test['reply']))) f = functools.partial( Test_ofctl._test, name=name, dp=dp, method=test['method'], args=args, request=request, reply=reply, expected=expected) test_lib.add_method(Test_ofctl, name, f) _add_tests() if __name__ == "__main__": unittest.main()
14995
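The _test helper in the ofctl test harness above compares the JSON-serialized expected and actual messages only after recursively stripping the volatile 'len'/'length' keys via _remove. A small standalone sketch of that normalization step, with made-up sample data and the invented name remove_keys, may help when reading the harness:

import json

def remove_keys(d, names):
    # Same idea as _remove() in the test above: drop the named keys at every
    # nesting level before the two sides are compared.
    if isinstance(d, list):
        return [remove_keys(x, names) for x in d]
    if isinstance(d, dict):
        return {k: remove_keys(v, names)
                for k, v in d.items() if k not in names}
    return d

expected = {"flags": 0, "length": 56, "body": [{"port_no": 7, "len": 8}]}
actual   = {"flags": 0, "length": 64, "body": [{"port_no": 7, "len": 16}]}
lhs = json.dumps(remove_keys(expected, ["len", "length"]), sort_keys=True)
rhs = json.dumps(remove_keys(actual, ["len", "length"]), sort_keys=True)
assert lhs == rhs   # the length fields no longer affect the comparison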
326
package org.araymond.joal.web.messages.outgoing; import org.araymond.joal.web.messages.outgoing.impl.announce.FailedToAnnouncePayload; import org.araymond.joal.web.messages.outgoing.impl.announce.SuccessfullyAnnouncePayload; import org.araymond.joal.web.messages.outgoing.impl.announce.TooManyAnnouncesFailedPayload; import org.araymond.joal.web.messages.outgoing.impl.announce.WillAnnouncePayload; import org.araymond.joal.web.messages.outgoing.impl.config.ConfigHasBeenLoadedPayload; import org.araymond.joal.web.messages.outgoing.impl.config.ConfigIsInDirtyStatePayload; import org.araymond.joal.web.messages.outgoing.impl.config.InvalidConfigPayload; import org.araymond.joal.web.messages.outgoing.impl.config.ListOfClientFilesPayload; import org.araymond.joal.web.messages.outgoing.impl.files.FailedToAddTorrentFilePayload; import org.araymond.joal.web.messages.outgoing.impl.files.TorrentFileAddedPayload; import org.araymond.joal.web.messages.outgoing.impl.files.TorrentFileDeletedPayload; import org.araymond.joal.web.messages.outgoing.impl.global.state.GlobalSeedStartedPayload; import org.araymond.joal.web.messages.outgoing.impl.global.state.GlobalSeedStoppedPayload; import org.araymond.joal.web.messages.outgoing.impl.speed.SeedingSpeedHasChangedPayload; import java.util.HashMap; import java.util.Map; /** * Created by raymo on 29/06/2017. */ public enum StompMessageTypes { //announce FAILED_TO_ANNOUNCE(FailedToAnnouncePayload.class), SUCCESSFULLY_ANNOUNCE(SuccessfullyAnnouncePayload.class), TOO_MANY_ANNOUNCES_FAILED(TooManyAnnouncesFailedPayload.class), WILL_ANNOUNCE(WillAnnouncePayload.class), //config CONFIG_HAS_BEEN_LOADED(ConfigHasBeenLoadedPayload.class), CONFIG_IS_IN_DIRTY_STATE(ConfigIsInDirtyStatePayload.class), INVALID_CONFIG(InvalidConfigPayload.class), LIST_OF_CLIENT_FILES(ListOfClientFilesPayload.class), // files TORRENT_FILE_ADDED(TorrentFileAddedPayload.class), TORRENT_FILE_DELETED(TorrentFileDeletedPayload.class), FAILED_TO_ADD_TORRENT_FILE(FailedToAddTorrentFilePayload.class), //global.state GLOBAL_SEED_STARTED(GlobalSeedStartedPayload.class), GLOBAL_SEED_STOPPED(GlobalSeedStoppedPayload.class), // speed SEEDING_SPEED_HAS_CHANGED(SeedingSpeedHasChangedPayload.class); private static final Map<Class<? extends MessagePayload>, StompMessageTypes> classToType = new HashMap<>(); private final Class<? extends MessagePayload> clazz; static { for (final StompMessageTypes type : StompMessageTypes.values()) { classToType.put(type.clazz, type); } } StompMessageTypes(final Class<? extends MessagePayload> clazz) { this.clazz = clazz; } static StompMessageTypes typeFor(final MessagePayload payload) { return typeFor(payload.getClass()); } static StompMessageTypes typeFor(final Class<? extends MessagePayload> clazz) { final StompMessageTypes type = classToType.get(clazz); if (type == null) { throw new IllegalArgumentException(clazz.getSimpleName() + " is not mapped with a StompMessageType."); } return type; } }
1288
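StompMessageTypes above is essentially a static registry from payload class to message-type constant, failing loudly when a payload has no mapping. The same lookup pattern, sketched in Python purely for illustration — the class names mirror the Java ones, but the string constant and function name here are invented:

class MessagePayload:
    pass

class WillAnnouncePayload(MessagePayload):
    pass

# Built once, like the static classToType map populated in the enum above.
_CLASS_TO_TYPE = {WillAnnouncePayload: "WILL_ANNOUNCE"}

def type_for(payload):
    message_type = _CLASS_TO_TYPE.get(type(payload))
    if message_type is None:
        raise ValueError(type(payload).__name__ +
                         " is not mapped with a StompMessageType.")
    return message_type

assert type_for(WillAnnouncePayload()) == "WILL_ANNOUNCE"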
577
package org.python.core; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.python.core.Opcode.*; public class PyBytecode extends PyBaseCode implements Traverseproc { // for debugging public static boolean defaultDebug = false; private static boolean debug; private static PyObject dis, opname; private int count = 0; // total number of opcodes run so far in this code obj private int maxCount = -1; // if -1, no cap on number of opcodes than can be run private static synchronized PyObject get_dis() { if (dis == null) { dis = __builtin__.__import__("dis"); } return dis; } private static synchronized PyObject get_opname() { if (opname == null) { opname = get_dis().__getattr__("opname"); } return opname; } public static void _allDebug(boolean setting) { defaultDebug = setting; } public PyObject _debug(int maxCount) { debug = maxCount > 0; this.maxCount = maxCount; return Py.None; } // end debugging public final static int CO_MAXBLOCKS = 20; // same as in CPython public final byte[] co_code; // widened to char to avoid signed byte issues public final PyObject[] co_consts; public final String[] co_names; public final int co_stacksize; public final byte[] co_lnotab; private final static int CALL_FLAG_VAR = 1; private final static int CALL_FLAG_KW = 2; // follows new.code's interface public PyBytecode(int argcount, int nlocals, int stacksize, int flags, String codestring, PyObject[] constants, String[] names, String varnames[], String filename, String name, int firstlineno, String lnotab) { this(argcount, nlocals, stacksize, flags, codestring, constants, names, varnames, filename, name, firstlineno, lnotab, null, null); } // XXX - intern names HERE instead of in marshal public PyBytecode(int argcount, int nlocals, int stacksize, int flags, String codestring, PyObject[] constants, String[] names, String varnames[], String filename, String name, int firstlineno, String lnotab, String[] cellvars, String[] freevars) { debug = defaultDebug; if (argcount < 0) { throw Py.ValueError("code: argcount must not be negative"); } else if (nlocals < 0) { throw Py.ValueError("code: nlocals must not be negative"); } co_argcount = nargs = argcount; co_varnames = varnames; co_nlocals = nlocals; // maybe assert = varnames.length; co_filename = filename; co_firstlineno = firstlineno; co_cellvars = cellvars; co_freevars = freevars; co_name = name; co_flags = new CompilerFlags(flags); varargs = co_flags.isFlagSet(CodeFlag.CO_VARARGS); varkwargs = co_flags.isFlagSet(CodeFlag.CO_VARKEYWORDS); co_stacksize = stacksize; co_consts = constants; co_names = names; co_code = getBytes(codestring); co_lnotab = getBytes(lnotab); } private static final String[] __members__ = { "co_name", "co_argcount", "co_varnames", "co_filename", "co_firstlineno", "co_flags", "co_cellvars", "co_freevars", "co_nlocals", "co_code", "co_consts", "co_names", "co_lnotab", "co_stacksize" }; @Override public PyObject __dir__() { PyString members[] = new PyString[__members__.length]; for (int i = 0; i < __members__.length; i++) { members[i] = new PyString(__members__[i]); } return new PyList(members); } private void throwReadonly(String name) { for (int i = 0; i < __members__.length; i++) { if (__members__[i] == name) { throw Py.TypeError("readonly attribute"); } } throw Py.AttributeError(name); } @Override public void __setattr__(String name, PyObject value) { // no writable attributes throwReadonly(name); } @Override public void __delattr__(String name) { throwReadonly(name); } private static PyTuple 
toPyStringTuple(String[] ar) { if (ar == null) { return Py.EmptyTuple; } int sz = ar.length; PyString[] pystr = new PyString[sz]; for (int i = 0; i < sz; i++) { pystr[i] = new PyString(ar[i]); } return new PyTuple(pystr); } @Override public PyObject __findattr_ex__(String name) { // have to craft co_varnames specially if (name == "co_varnames") { return toPyStringTuple(co_varnames); } if (name == "co_cellvars") { return toPyStringTuple(co_cellvars); } if (name == "co_freevars") { return toPyStringTuple(co_freevars); } if (name == "co_filename") { return Py.fileSystemEncode(co_filename); // bytes object expected by clients } if (name == "co_name") { return new PyString(co_name); } if (name == "co_code") { return new PyString(getString(co_code)); } if (name == "co_lnotab") { return new PyString(getString(co_lnotab)); } if (name == "co_consts") { return new PyTuple(co_consts); } if (name == "co_flags") { return Py.newInteger(co_flags.toBits()); } return super.__findattr_ex__(name); } enum Why { NOT, /* No error */ EXCEPTION, /* Exception occurred */ RERAISE, /* Exception re-raised by 'finally' */ RETURN, /* 'return' statement */ BREAK, /* 'break' statement */ CONTINUE, /* 'continue' statement */ YIELD /* 'yield' operator */ }; // to enable why's to be stored on a PyStack @Untraversable private static class PyStackWhy extends PyObject { Why why; PyStackWhy(Why why) { this.why = why; } @Override public String toString() { return why.toString(); } } private static class PyStackException extends PyObject implements Traverseproc { PyException exception; PyStackException(PyException exception) { this.exception = exception; } @Override public String toString() { return String.format("PyStackException<%s,%s,%.100s>", exception.type, exception.value, exception.traceback); } /* Traverseproc implementation */ @Override public int traverse(Visitproc visit, Object arg) { return exception != null ? exception.traverse(visit, arg) : 0; } @Override public boolean refersDirectlyTo(PyObject ob) { return ob != null && exception.refersDirectlyTo(ob); } } private static String stringify_blocks(PyFrame f) { if (f.f_exits == null || f.f_lineno == 0) { return "[]"; } StringBuilder buf = new StringBuilder("["); for (int i = 0; i < f.f_exits.length; i++) { buf.append(f.f_exits[i].toString()); if (i < f.f_exits.length - 1) { buf.append(", "); } } buf.append("]"); return buf.toString(); } private void print_debug(int count, int next_instr, int line, int opcode, int oparg, PyStack stack, PyFrame f) { if (debug) { System.err.println(co_name + " " + line + ":" + count + "," + f.f_lasti + "> " + opcode+" "+ get_opname().__getitem__(Py.newInteger(opcode)) + (opcode >= HAVE_ARGUMENT ? " " + oparg : "") + ", stack: " + stack.toString() + ", blocks: " + stringify_blocks(f)); } } // the following code exploits the fact that f_exits is only used by code compiled to Java bytecode; // in their place we implement the block stack for PBC-VM, as mapped below in the comments of pushBlock private static PyTryBlock popBlock(PyFrame f) { return (PyTryBlock)(((PyList)f.f_exits[0]).pop()); } private static void pushBlock(PyFrame f, PyTryBlock block) { if (f.f_exits == null) { // allocate in the frame where they can fit! 
TODO consider supporting directly in the frame f.f_exits = new PyObject[1]; // f_blockstack in CPython - a simple ArrayList might be best f.f_exits[0] = new PyList(); } ((PyList)f.f_exits[0]).append(block); } private boolean blocksLeft(PyFrame f) { if (f.f_exits != null) { return ((PyList)f.f_exits[0]).__nonzero__(); } else { return false; } } @Override protected PyObject interpret(PyFrame f, ThreadState ts) { final PyStack stack = new PyStack(co_stacksize); int next_instr = -1; int opcode; /* Current opcode */ int oparg = 0; /* Current opcode argument, if any */ Why why = Why.NOT; PyObject retval = null; LineCache lineCache = null; int last_line = -1; int line = 0; // XXX - optimization opportunities // 1. consider detaching the setting/getting of frame fields to improve performance, instead do this // in a shadow version of the frame that we copy back to on entry/exit and downcalls if (debug) { System.err.println(co_name + ":" + f.f_lasti + "/" + co_code.length + ", cells:" + Arrays.toString(co_cellvars) + ", free:" + Arrays.toString(co_freevars)); int i = 0; for (String cellvar : co_cellvars) { System.err.println(cellvar + " = " + f.f_env[i++]); } for (String freevar : co_freevars) { System.err.println(freevar + " = " + f.f_env[i++]); } get_dis().invoke("disassemble", this); } if (f.f_lasti >= co_code.length) { throw Py.SystemError(""); // XXX - chose an appropriate error!!! } next_instr = f.f_lasti; // the restore stack aspects should occur ONLY after a yield boolean checkGeneratorInput = false; if (f.f_savedlocals != null) { for (int i = 0; i < f.f_savedlocals.length; i++) { PyObject v = (PyObject) (f.f_savedlocals[i]); stack.push(v); } checkGeneratorInput = true; f.f_savedlocals = null; } while (!debug || (maxCount == -1 || count < maxCount)) { // XXX - replace with while(true) if (f.tracefunc != null || debug) { if (lineCache == null) { lineCache = new LineCache(); if (debug) { System.err.println("LineCache: " + lineCache.toString()); } } line = lineCache.getline(next_instr); // XXX - should also return the range this is valid to avoid an unnecessary bisect if (line != last_line) { f.setline(line); } } try { if (checkGeneratorInput) { checkGeneratorInput = false; Object generatorInput = f.getGeneratorInput(); if (generatorInput instanceof PyException) { throw (PyException) generatorInput; } stack.push((PyObject) generatorInput); } opcode = getUnsigned(co_code, next_instr); if (opcode >= HAVE_ARGUMENT) { next_instr += 2; oparg = (getUnsigned(co_code, next_instr) << 8) + getUnsigned(co_code, next_instr - 1); } print_debug(count, next_instr, line, opcode, oparg, stack, f); count += 1; next_instr += 1; f.f_lasti = next_instr; switch (opcode) { case NOP: break; case LOAD_FAST: stack.push(f.getlocal(oparg)); break; case LOAD_CONST: stack.push(co_consts[oparg]); break; case STORE_FAST: f.setlocal(oparg, stack.pop()); break; case POP_TOP: stack.pop(); break; case ROT_TWO: stack.rot2(); break; case ROT_THREE: stack.rot3(); break; case ROT_FOUR: stack.rot4(); break; case DUP_TOP: stack.dup(); break; case DUP_TOPX: { if (oparg == 2 || oparg == 3) { stack.dup(oparg); } else { throw Py.RuntimeError("invalid argument to DUP_TOPX" + " (bytecode corruption?)"); } break; } case UNARY_POSITIVE: stack.push(stack.pop().__pos__()); break; case UNARY_NEGATIVE: stack.push(stack.pop().__neg__()); break; case UNARY_NOT: stack.push(stack.pop().__not__()); break; case UNARY_CONVERT: stack.push(stack.pop().__repr__()); break; case UNARY_INVERT: stack.push(stack.pop().__invert__()); break; case BINARY_POWER: { 
PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._pow(b)); break; } case BINARY_MULTIPLY: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._mul(b)); break; } case BINARY_DIVIDE: { PyObject b = stack.pop(); PyObject a = stack.pop(); if (!co_flags.isFlagSet(CodeFlag.CO_FUTURE_DIVISION)) { stack.push(a._div(b)); } else { stack.push(a._truediv(b)); } break; } case BINARY_TRUE_DIVIDE: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._truediv(b)); break; } case BINARY_FLOOR_DIVIDE: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._floordiv(b)); break; } case BINARY_MODULO: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._mod(b)); break; } case BINARY_ADD: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._add(b)); break; } case BINARY_SUBTRACT: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._sub(b)); break; } case BINARY_SUBSCR: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a.__getitem__(b)); break; } case BINARY_LSHIFT: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._lshift(b)); break; } case BINARY_RSHIFT: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._rshift(b)); break; } case BINARY_AND: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._and(b)); break; } case BINARY_XOR: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._xor(b)); break; } case BINARY_OR: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._or(b)); break; } case LIST_APPEND: { PyObject b = stack.pop(); PyList a = (PyList) stack.top(oparg); a.append(b); break; } case SET_ADD: { PyObject b = stack.pop(); PySet a = (PySet) stack.top(oparg); a.add(b); break; } case INPLACE_POWER: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._ipow(b)); break; } case INPLACE_MULTIPLY: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._imul(b)); break; } case INPLACE_DIVIDE: { PyObject b = stack.pop(); PyObject a = stack.pop(); if (!co_flags.isFlagSet(CodeFlag.CO_FUTURE_DIVISION)) { stack.push(a._idiv(b)); } else { stack.push(a._itruediv(b)); } break; } case INPLACE_TRUE_DIVIDE: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._itruediv(b)); break; } case INPLACE_FLOOR_DIVIDE: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._ifloordiv(b)); break; } case INPLACE_MODULO: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._imod(b)); break; } case INPLACE_ADD: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._iadd(b)); break; } case INPLACE_SUBTRACT: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._isub(b)); break; } case INPLACE_LSHIFT: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._ilshift(b)); break; } case INPLACE_RSHIFT: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._irshift(b)); break; } case INPLACE_AND: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._iand(b)); break; } case INPLACE_XOR: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._ixor(b)); break; } case INPLACE_OR: { PyObject b = stack.pop(); PyObject a = stack.pop(); stack.push(a._ior(b)); break; } case SLICE: case SLICE_1: case SLICE_2: case SLICE_3: { PyObject stop = (((opcode - SLICE) & 2) != 0) ? stack.pop() : null; PyObject start = (((opcode - SLICE) & 1) != 0) ? 
stack.pop() : null; PyObject obj = stack.pop(); stack.push(obj.__getslice__(start, stop)); break; } case STORE_SLICE: case STORE_SLICE_1: case STORE_SLICE_2: case STORE_SLICE_3: { PyObject stop = (((opcode - STORE_SLICE) & 2) != 0) ? stack.pop() : null; PyObject start = (((opcode - STORE_SLICE) & 1) != 0) ? stack.pop() : null; PyObject obj = stack.pop(); PyObject value = stack.pop(); obj.__setslice__(start, stop, value); break; } case DELETE_SLICE: case DELETE_SLICE_1: case DELETE_SLICE_2: case DELETE_SLICE_3: { PyObject stop = (((opcode - DELETE_SLICE) & 2) != 0) ? stack.pop() : null; PyObject start = (((opcode - DELETE_SLICE) & 1) != 0) ? stack.pop() : null; PyObject obj = stack.pop(); obj.__delslice__(start, stop); break; } case STORE_SUBSCR: { PyObject key = stack.pop(); PyObject obj = stack.pop(); PyObject value = stack.pop(); obj.__setitem__(key, value); break; } case DELETE_SUBSCR: { PyObject key = stack.pop(); PyObject obj = stack.pop(); obj.__delitem__(key); break; } case PRINT_EXPR: PySystemState.displayhook(stack.pop()); break; case PRINT_ITEM_TO: Py.printComma(stack.pop(), stack.pop()); break; case PRINT_ITEM: Py.printComma(stack.pop()); break; case PRINT_NEWLINE_TO: Py.printlnv(stack.pop()); break; case PRINT_NEWLINE: Py.println(); break; case RAISE_VARARGS: switch (oparg) { case 3: { PyTraceback tb = (PyTraceback) (stack.pop()); PyObject value = stack.pop(); PyObject type = stack.pop(); throw PyException.doRaise(type, value, tb); } case 2: { PyObject value = stack.pop(); PyObject type = stack.pop(); throw PyException.doRaise(type, value, null); } case 1: { PyObject type = stack.pop(); throw PyException.doRaise(type, null, null); } case 0: throw PyException.doRaise(null, null, null); default: throw Py.SystemError("bad RAISE_VARARGS oparg"); } case LOAD_LOCALS: stack.push(f.f_locals); break; case RETURN_VALUE: retval = stack.pop(); why = Why.RETURN; break; case YIELD_VALUE: retval = stack.pop(); // Note: CPython calls f->f_stacktop = stack_pointer; here why = Why.YIELD; break; case EXEC_STMT: { PyObject locals = stack.pop(); PyObject globals = stack.pop(); PyObject code = stack.pop(); //Todo: Better make it possible to use PyFrame f here: Py.exec(code, globals == Py.None ? null : globals, locals == Py.None ? null : locals); break; } case POP_BLOCK: { PyTryBlock b = popBlock(f); while (stack.size() > b.b_level) { stack.pop(); } break; } case END_FINALLY: { // Todo: Review this regarding Python 2.7-update PyObject v = stack.pop(); if (v instanceof PyStackWhy) { why = ((PyStackWhy) v).why; assert (why != Why.YIELD); if (why == Why.RETURN || why == Why.CONTINUE) { retval = stack.pop(); } } else if (v instanceof PyStackException) { stack.top -= 2; // to pop value, traceback ts.exception = ((PyStackException) v).exception; why = Why.RERAISE; } else if (v instanceof PyString) { // This shouldn't happen, because Jython always pushes // exception type as PyException-object. // Todo: Test, if it can be removed. 
PyObject value = stack.pop(); PyObject traceback = stack.pop(); ts.exception = new PyException(v, value, (PyTraceback) traceback); why = Why.RERAISE; } else if (v != Py.None) { throw Py.SystemError("'finally' pops bad exception"); } break; } case BUILD_CLASS: { PyObject methods = stack.pop(); PyObject bases[] = ((PySequenceList) (stack.pop())).getArray(); String name = stack.pop().toString(); stack.push(Py.makeClass(name, bases, methods)); break; } case STORE_NAME: f.setlocal(co_names[oparg], stack.pop()); break; case DELETE_NAME: f.dellocal(co_names[oparg]); break; case UNPACK_SEQUENCE: unpack_iterable(oparg, stack); break; case STORE_ATTR: { PyObject obj = stack.pop(); PyObject v = stack.pop(); obj.__setattr__(co_names[oparg], v); break; } case DELETE_ATTR: stack.pop().__delattr__(co_names[oparg]); break; case STORE_GLOBAL: f.setglobal(co_names[oparg], stack.pop()); break; case DELETE_GLOBAL: f.delglobal(co_names[oparg]); break; case LOAD_NAME: stack.push(f.getname(co_names[oparg])); break; case LOAD_GLOBAL: stack.push(f.getglobal(co_names[oparg])); break; case DELETE_FAST: f.dellocal(oparg); break; case LOAD_CLOSURE: { // Todo: Review this regarding Python 2.7-update PyCell cell = (PyCell) (f.getclosure(oparg)); if (cell.ob_ref == null) { String name; if (oparg >= co_cellvars.length) { name = co_freevars[oparg - co_cellvars.length]; } else { name = co_cellvars[oparg]; } // XXX - consider some efficient lookup mechanism, like a hash :), // at least if co_varnames is much greater than say a certain // size (but i would think, it's not going to happen in real code. profile?) if (f.f_fastlocals != null) { int i = 0; boolean matched = false; for (String match : co_varnames) { if (match.equals(name)) { matched = true; break; } i++; } if (matched) { cell.ob_ref = f.f_fastlocals[i]; } } else { cell.ob_ref = f.f_locals.__finditem__(name); } } stack.push(cell); break; } case LOAD_DEREF: { // Todo: Review this regarding Python 2.7-update // common code from LOAD_CLOSURE PyCell cell = (PyCell) (f.getclosure(oparg)); if (cell.ob_ref == null) { String name; if (oparg >= co_cellvars.length) { name = co_freevars[oparg - co_cellvars.length]; } else { name = co_cellvars[oparg]; } // XXX - consider some efficient lookup mechanism, like a hash :), // at least if co_varnames is much greater than say a certain // size (but i would think, it's not going to happen in real code. profile?) 
if (f.f_fastlocals != null) { int i = 0; boolean matched = false; for (String match : co_varnames) { if (match.equals(name)) { matched = true; break; } i++; } if (matched) { cell.ob_ref = f.f_fastlocals[i]; } } else { cell.ob_ref = f.f_locals.__finditem__(name); } } stack.push(cell.ob_ref); break; } case STORE_DEREF: f.setderef(oparg, stack.pop()); break; case BUILD_TUPLE: stack.push(new PyTuple(stack.popN(oparg))); break; case BUILD_LIST: stack.push(new PyList(stack.popN(oparg))); break; case BUILD_SET: stack.push(new PySet(stack.popN(oparg))); break; case BUILD_MAP: // oparg contains initial capacity: stack.push(new PyDictionary(PyDictionary.TYPE, oparg)); break; case STORE_MAP: { PyObject key = stack.pop(); PyObject val = stack.pop(); stack.top().__setitem__(key, val); break; } case MAP_ADD: { PyObject key = stack.pop(); PyObject val = stack.pop(); stack.top(oparg).__setitem__(key, val); break; } case LOAD_ATTR: { String name = co_names[oparg]; stack.push(stack.pop().__getattr__(name)); break; } case COMPARE_OP: { PyObject b = stack.pop(); PyObject a = stack.pop(); switch (oparg) { case PyCmp_LT: stack.push(a._lt(b)); break; case PyCmp_LE: stack.push(a._le(b)); break; case PyCmp_EQ: stack.push(a._eq(b)); break; case PyCmp_NE: stack.push(a._ne(b)); break; case PyCmp_GT: stack.push(a._gt(b)); break; case PyCmp_GE: stack.push(a._ge(b)); break; case PyCmp_IN: stack.push(a._in(b)); break; case PyCmp_NOT_IN: stack.push(a._notin(b)); break; case PyCmp_IS: stack.push(a._is(b)); break; case PyCmp_IS_NOT: stack.push(a._isnot(b)); break; case PyCmp_EXC_MATCH: // Todo: Review this regarding Python 2.7-update if (a instanceof PyStackException) { PyException pye = ((PyStackException) a).exception; stack.push(Py.newBoolean(pye.match(b))); } else { stack.push(Py.newBoolean(new PyException(a).match(b))); } break; } break; } case IMPORT_NAME: { // Todo: Review this regarding Python 2.7-update PyObject __import__ = f.f_builtins.__finditem__("__import__"); if (__import__ == null) { throw Py.ImportError("__import__ not found"); } PyString name = Py.newString(co_names[oparg]); PyObject fromlist = stack.pop(); PyObject level = stack.pop(); if (level.asInt() != -1) { stack.push(__import__.__call__(new PyObject[]{name, f.f_globals, f.f_locals, fromlist, level})); } else { stack.push(__import__.__call__(new PyObject[]{name, f.f_globals, f.f_locals, fromlist})); } break; } case IMPORT_STAR: { // Todo: Review this regarding Python 2.7-update PyObject module = stack.pop(); imp.importAll(module, f); break; } case IMPORT_FROM: String name = co_names[oparg]; try { stack.push(stack.top().__getattr__(name)); } catch (PyException pye) { if (pye.match(Py.AttributeError)) { throw Py.ImportError(String.format("cannot import name %.230s", name)); } else { throw pye; } } break; case JUMP_FORWARD: next_instr += oparg; break; case POP_JUMP_IF_FALSE: if (!stack.pop().__nonzero__()) { next_instr = oparg; } break; case POP_JUMP_IF_TRUE: if (stack.pop().__nonzero__()) { next_instr = oparg; } break; case JUMP_IF_FALSE_OR_POP: if (stack.top().__nonzero__()) { --stack.top; } else { next_instr = oparg; } break; case JUMP_IF_TRUE_OR_POP: if (!stack.top().__nonzero__()) { --stack.top; } else { next_instr = oparg; } break; case JUMP_ABSOLUTE: next_instr = oparg; break; case GET_ITER: { PyObject it = stack.top().__iter__(); if (it != null) { stack.set_top(it); } break; } case FOR_ITER: { PyObject it = stack.pop(); try { PyObject x = it.__iternext__(); if (x != null) { stack.push(it); stack.push(x); break; } } catch (PyException pye) { if 
(!pye.match(Py.StopIteration)) { throw pye; } } next_instr += oparg; break; } case BREAK_LOOP: why = Why.BREAK; break; case CONTINUE_LOOP: retval = Py.newInteger(oparg); if (retval.__nonzero__()) { why = Why.CONTINUE; } break; case SETUP_LOOP: case SETUP_EXCEPT: case SETUP_FINALLY: pushBlock(f, new PyTryBlock(opcode, next_instr + oparg, stack.size())); break; case SETUP_WITH: { PyObject w = stack.top(); PyObject exit = w.__getattr__("__exit__"); if (exit == null) { break; } stack.set_top(exit); PyObject enter = w.__getattr__("__enter__"); if (enter == null) { break; } w = enter.__call__(); if (w == null) { break; } /* Setup a finally block (SETUP_WITH as a block is equivalent to SETUP_FINALLY except it normalizes the exception) before pushing the result of __enter__ on the stack. */ pushBlock(f, new PyTryBlock(opcode, next_instr + oparg, stack.size())); stack.push(w); break; } case WITH_CLEANUP: { /* At the top of the stack are 1-3 values indicating how/why we entered the finally clause: - TOP = None - (TOP, SECOND) = (WHY_{RETURN,CONTINUE}), retval - TOP = WHY_*; no retval below it - (TOP, SECOND, THIRD) = exc_info() Below them is EXIT, the context.__exit__ bound method. In the last case, we must call EXIT(TOP, SECOND, THIRD) otherwise we must call EXIT(None, None, None) In all cases, we remove EXIT from the stack, leaving the rest in the same order. In addition, if the stack represents an exception, *and* the function call returns a 'true' value, we "zap" this information, to prevent END_FINALLY from re-raising the exception. (But non-local gotos should still be resumed.) */ PyObject exit; PyObject u = stack.pop(), v, w; if (u == Py.None) { exit = stack.top(); stack.set_top(u); v = w = Py.None; } else if (u instanceof PyStackWhy) { switch (((PyStackWhy) u).why) { case RETURN: case CONTINUE: exit = stack.top(2); stack.set_top(2, stack.top()); stack.set_top(u); break; default: exit = stack.top(); stack.set_top(u); } u = v = w = Py.None; } else { v = stack.top(); w = stack.top(2); exit = stack.top(3); stack.set_top(u); stack.set_top(2, v); stack.set_top(3, w); } PyObject x = null; if (u instanceof PyStackException) { PyException exc = ((PyStackException) u).exception; x = exit.__call__(exc.type, exc.value, exc.traceback); } else { x = exit.__call__(u, v, w); } if (u != Py.None && x != null && x.__nonzero__()) { stack.top -= 2; // XXX - consider stack.stackadj op stack.set_top(Py.None); } break; } case CALL_FUNCTION: { // Todo: Review this regarding Python 2.7-update int na = oparg & 0xff; int nk = (oparg >> 8) & 0xff; if (nk == 0) { call_function(na, stack); } else { call_function(na, nk, stack); } break; } case CALL_FUNCTION_VAR: case CALL_FUNCTION_KW: case CALL_FUNCTION_VAR_KW: { // Todo: Review this regarding Python 2.7-update int na = oparg & 0xff; int nk = (oparg >> 8) & 0xff; int flags = (opcode - CALL_FUNCTION) & 3; call_function(na, nk, (flags & CALL_FLAG_VAR) != 0, (flags & CALL_FLAG_KW) != 0, stack); break; } case MAKE_FUNCTION: { // Todo: Review this regarding Python 2.7-update PyCode code = (PyCode) stack.pop(); PyObject[] defaults = stack.popN(oparg); PyObject doc = null; if (code instanceof PyBytecode && ((PyBytecode) code).co_consts.length > 0) { doc = ((PyBytecode) code).co_consts[0]; } PyFunction func = new PyFunction(f.f_globals, defaults, code, doc); stack.push(func); break; } case MAKE_CLOSURE: { PyCode code = (PyCode) stack.pop(); PyObject[] closure_cells = ((PySequenceList) (stack.pop())).getArray(); PyObject[] defaults = stack.popN(oparg); PyObject doc = null; if 
(code instanceof PyBytecode && ((PyBytecode) code).co_consts.length > 0) { doc = ((PyBytecode) code).co_consts[0]; } PyFunction func = new PyFunction(f.f_globals, defaults, code, doc, closure_cells); stack.push(func); break; } case BUILD_SLICE: { PyObject step = oparg == 3 ? stack.pop() : null; PyObject stop = stack.pop(); PyObject start = stack.pop(); stack.push(new PySlice(start, stop, step)); break; } case EXTENDED_ARG: // Todo: Review this regarding Python 2.7-update opcode = getUnsigned(co_code, next_instr++); next_instr += 2; oparg = oparg << 16 | ((getUnsigned(co_code, next_instr) << 8) + getUnsigned(co_code, next_instr - 1)); break; default: Py.print(Py.getSystemState().stderr, Py.newString( String.format("XXX lineno: %d, opcode: %d\n", f.f_lasti, opcode))); throw Py.SystemError("unknown opcode"); } // end switch } // end try catch (Throwable t) { PyException pye = Py.setException(t, f); why = Why.EXCEPTION; ts.exception = pye; if (debug) { System.err.println("Caught exception:" + pye); } } if (why == Why.YIELD) { break; } // do some trace handling here, but for now just convert to EXCEPTION if (why == Why.RERAISE) { why = Why.EXCEPTION; } while (why != Why.NOT && blocksLeft(f)) { PyTryBlock b = popBlock(f); if (debug) { System.err.println("Processing block: " + b); } assert (why != Why.YIELD); if (b.b_type == SETUP_LOOP && why == Why.CONTINUE) { pushBlock(f, b); why = Why.NOT; next_instr = retval.asInt(); break; } while (stack.size() > b.b_level) { stack.pop(); } if (b.b_type == SETUP_LOOP && why == Why.BREAK) { why = Why.NOT; next_instr = b.b_handler; break; } if (b.b_type == SETUP_FINALLY || (b.b_type == SETUP_EXCEPT && why == Why.EXCEPTION) || b.b_type == SETUP_WITH) { if (why == Why.EXCEPTION) { PyException exc = ts.exception; if (b.b_type == SETUP_EXCEPT || b.b_type == SETUP_WITH) { exc.normalize(); } stack.push(exc.traceback); stack.push(exc.value); stack.push(new PyStackException(exc)); // instead of stack.push(exc.type), like CPython } else { if (why == Why.RETURN || why == Why.CONTINUE) { stack.push(retval); } stack.push(new PyStackWhy(why)); } why = Why.NOT; next_instr = b.b_handler; break; } } // unwind block stack if (why != Why.NOT) { break; } } // end-while of the instruction loop if (why != Why.YIELD) { while (stack.size() > 0) { stack.pop(); } if (why != Why.RETURN) { retval = Py.None; } } else { // store the stack in the frame for reentry from the yield; f.f_savedlocals = stack.popN(stack.size()); } f.f_lasti = next_instr; // need to update on function entry, etc if (debug) { System.err.println(count + "," + f.f_lasti + "> Returning from " + why + ": " + retval + ", stack: " + stack.toString() + ", blocks: " + stringify_blocks(f)); } if (why == Why.EXCEPTION) { throw ts.exception; } if (co_flags.isFlagSet(CodeFlag.CO_GENERATOR) && why == Why.RETURN && retval == Py.None) { f.f_lasti = -1; } return retval; } private static void call_function(int na, PyStack stack) { switch (na) { case 0: { PyObject callable = stack.pop(); stack.push(callable.__call__()); break; } case 1: { PyObject arg = stack.pop(); PyObject callable = stack.pop(); stack.push(callable.__call__(arg)); break; } case 2: { PyObject arg1 = stack.pop(); PyObject arg0 = stack.pop(); PyObject callable = stack.pop(); stack.push(callable.__call__(arg0, arg1)); break; } case 3: { PyObject arg2 = stack.pop(); PyObject arg1 = stack.pop(); PyObject arg0 = stack.pop(); PyObject callable = stack.pop(); stack.push(callable.__call__(arg0, arg1, arg2)); break; } case 4: { PyObject arg3 = stack.pop(); PyObject arg2 = 
stack.pop(); PyObject arg1 = stack.pop(); PyObject arg0 = stack.pop(); PyObject callable = stack.pop(); stack.push(callable.__call__(arg0, arg1, arg2, arg3)); break; } default: { PyObject args[] = stack.popN(na); PyObject callable = stack.pop(); stack.push(callable.__call__(args)); } } } private static void call_function(int na, int nk, PyStack stack) { int n = na + nk * 2; PyObject params[] = stack.popN(n); PyObject callable = stack.pop(); PyObject args[] = new PyObject[na + nk]; String keywords[] = new String[nk]; int i; for (i = 0; i < na; i++) { args[i] = params[i]; } for (int j = 0; i < n; i += 2, j++) { keywords[j] = params[i].toString(); args[na + j] = params[i + 1]; } stack.push(callable.__call__(args, keywords)); } private static void call_function(int na, int nk, boolean var, boolean kw, PyStack stack) { int n = na + nk * 2; PyObject kwargs = kw ? stack.pop() : null; PyObject starargs = var ? stack.pop() : null; PyObject params[] = stack.popN(n); PyObject callable = stack.pop(); PyObject args[] = new PyObject[na + nk]; String keywords[] = new String[nk]; int i; for (i = 0; i < na; i++) { args[i] = params[i]; } for (int j = 0; i < n; i += 2, j++) { keywords[j] = params[i].toString(); args[na + j] = params[i + 1]; } stack.push(callable._callextra(args, keywords, starargs, kwargs)); } private static void unpack_iterable(int oparg, PyStack stack) { PyObject v = stack.pop(); int i = oparg; PyObject items[] = new PyObject[oparg]; for (PyObject item : v.asIterable()) { if (i <= 0) { throw Py.ValueError("too many values to unpack"); } i--; items[i] = item; } if (i > 0) { throw Py.ValueError(String.format("need more than %d value%s to unpack", i, i == 1 ? "" : "s")); } for (i = 0; i < oparg; i++) { stack.push(items[i]); } } private static class PyStack { final PyObject[] stack; int top = -1; PyStack(int size) { stack = new PyObject[size]; } PyObject top() { return stack[top]; } PyObject top(int n) { return stack[top - n + 1]; } PyObject pop() { return stack[top--]; } void push(PyObject v) { stack[++top] = v; } void set_top(PyObject v) { stack[top] = v; } void set_top(int n, PyObject v) { stack[top - n + 1] = v; } void dup() { stack[top + 1] = stack[top]; top++; } void dup(int n) { int oldTop = top; top += n; for (int i = 0; i < n; i++) { stack[top - i] = stack[oldTop - i]; } } PyObject[] popN(int n) { PyObject ret[] = new PyObject[n]; top -= n; for (int i = 0; i < n; i++) { ret[i] = stack[top + i + 1]; } return ret; } void rot2() { PyObject topv = stack[top]; stack[top] = stack[top - 1]; stack[top - 1] = topv; } void rot3() { PyObject v = stack[top]; PyObject w = stack[top - 1]; PyObject x = stack[top - 2]; stack[top] = w; stack[top - 1] = x; stack[top - 2] = v; } void rot4() { PyObject u = stack[top]; PyObject v = stack[top - 1]; PyObject w = stack[top - 2]; PyObject x = stack[top - 3]; stack[top] = v; stack[top - 1] = w; stack[top - 2] = x; stack[top - 3] = u; } int size() { return top + 1; } @Override public String toString() { StringBuilder buffer = new StringBuilder(); int size = size(); int N = size > 4 ? 
4 : size; buffer.append("["); for (int i = 0; i < N; i++) { if (i > 0) { buffer.append(", "); } PyObject item = stack[N - i - 1]; buffer.append(upto(item.__repr__().toString())); } if (N < size) { buffer.append(String.format(", %d more...", size - N)); } buffer.append("]"); return buffer.toString(); } private String upto(String x) { return upto(x, 100); } private String upto(String x, int n) { x = x.replace('\n', '|'); if (x.length() > n) { StringBuilder item = new StringBuilder(x.substring(0, n)); item.append("..."); return item.toString(); } else { return x; } } } @Untraversable private static class PyTryBlock extends PyObject { // purely to sit on top of the existing PyFrame in f_exits!!! int b_type; /* what kind of block this is */ int b_handler; /* where to jump to find handler */ int b_level; /* value stack level to pop to */ PyTryBlock(int type, int handler, int level) { b_type = type; b_handler = handler; b_level = level; } @Override public String toString() { return "<" + get_opname().__getitem__(Py.newInteger(b_type)) + "," + b_handler + "," + b_level + ">"; } } @Override protected int getline(PyFrame f) { int addrq = f.f_lasti; int size = co_lnotab.length / 2; int p = 0; int line = co_firstlineno; int addr = 0; while (--size >= 0) { addr += getUnsigned(co_lnotab, p++); if (addr > addrq) { break; } line += getUnsigned(co_lnotab, p++); } return line; } private class LineCache { List<Integer> addr_breakpoints = new ArrayList<Integer>(); List<Integer> lines = new ArrayList<Integer>(); // length should be one more than addr_breakpoints private LineCache() { // based on dis.findlinestarts int size = co_lnotab.length / 2; int p = 0; int lastline = -1; int line = co_firstlineno; int addr = 0; while (--size >= 0) { int byte_incr = getUnsigned(co_lnotab, p++); int line_incr = getUnsigned(co_lnotab, p++); if (byte_incr > 0) { if (line != lastline) { addr_breakpoints.add(addr); lines.add(line); lastline = line; } addr += byte_incr; } line += line_incr; } if (line != lastline) { lines.add(line); } } private int getline(int addrq) { // bisect_right to the lineno int lo = 0; int hi = addr_breakpoints.size(); while (lo < hi) { int mid = (lo + hi) / 2; if (addrq < addr_breakpoints.get(mid)) { hi = mid; } else { lo = mid + 1; } } return lines.get(lo); } @Override public String toString() { return addr_breakpoints.toString() + ";" + lines.toString(); } } // Utility functions to enable storage of unsigned bytes in co_code, co_lnotab byte[] arrays private static char getUnsigned(byte[] x, int i) { byte b = x[i]; if (b < 0) { return (char) (b + 256); } else { return (char) b; } } private static String getString(byte[] x) { StringBuilder buffer = new StringBuilder(x.length); for (int i = 0; i < x.length; i++) { buffer.append(getUnsigned(x, i)); } return buffer.toString(); } private static byte[] getBytes(String s) { int len = s.length(); byte[] x = new byte[len]; for (int i = 0; i < len; i++) { x[i] = (byte) (s.charAt(i) & 0xFF); } return x; } /* Traverseproc implementation */ @Override public int traverse(Visitproc visit, Object arg) { int retValue; if (co_consts != null) { for (PyObject ob: co_consts) { if (ob != null) { retValue = visit.visit(ob, arg); if (retValue != 0) { return retValue; } } } } return 0; } @Override public boolean refersDirectlyTo(PyObject ob) { if (ob == null || co_consts == null) { return false; } else { for (PyObject obj: co_consts) { if (obj == ob) { return true; } } return false; } } }
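The getline() and LineCache code at the end of the interpreter above recovers source line numbers by walking co_lnotab, a table of unsigned (bytecode-increment, line-increment) byte pairs. The standalone Java sketch below mirrors that walk outside the class; the sample table and starting line number are invented purely for illustration and are not taken from any real code object.

// Illustrative, self-contained rendering of the co_lnotab walk performed by getline() above.
public class LnotabDemo {
    // lnotab uses the same convention as PyBytecode: pairs of unsigned bytes,
    // (bytecode_increment, line_increment).
    static int lineForOffset(byte[] lnotab, int firstLine, int targetOffset) {
        int line = firstLine;
        int addr = 0;
        for (int p = 0; p + 1 < lnotab.length; p += 2) {
            addr += lnotab[p] & 0xFF;        // advance the bytecode offset
            if (addr > targetOffset) {
                break;                        // passed the queried offset; current line is the answer
            }
            line += lnotab[p + 1] & 0xFF;    // advance the source line
        }
        return line;
    }

    public static void main(String[] args) {
        // Made-up table: (6 bytes, +1 line), (4 bytes, +2 lines), (8 bytes, +1 line)
        byte[] lnotab = {6, 1, 4, 2, 8, 1};
        System.out.println(lineForOffset(lnotab, 10, 0));   // 10
        System.out.println(lineForOffset(lnotab, 10, 6));   // 11
        System.out.println(lineForOffset(lnotab, 10, 11));  // 13
    }
}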
39,974
1,013
<reponame>BestBearr/commands<filename>core/src/main/java/co/aikar/commands/CommandReplacements.java /* * Copyright (c) 2016-2017 <NAME> (Aikar) - MIT License * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package co.aikar.commands; import org.jetbrains.annotations.Nullable; import java.util.AbstractMap; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Manages replacement template strings */ public class CommandReplacements { private final CommandManager manager; private final Map<String, Map.Entry<Pattern, String>> replacements = new LinkedHashMap<>(); CommandReplacements(CommandManager manager) { this.manager = manager; addReplacement0("truthy", "true|false|yes|no|1|0|on|off|t|f"); } public void addReplacements(String... replacements) { if (replacements.length == 0 || replacements.length % 2 != 0) { throw new IllegalArgumentException("Must pass a number of arguments divisible by 2."); } for (int i = 0; i < replacements.length; i += 2) { addReplacement(replacements[i], replacements[i + 1]); } } public String addReplacement(String key, String val) { return addReplacement0(key, val); } @Nullable private String addReplacement0(String key, String val) { key = ACFPatterns.PERCENTAGE.matcher(key.toLowerCase(Locale.ENGLISH)).replaceAll(""); Pattern pattern = Pattern.compile("%\\{" + Pattern.quote(key) + "}|%" + Pattern.quote(key) + "\\b", Pattern.CASE_INSENSITIVE); Map.Entry<Pattern, String> entry = new AbstractMap.SimpleImmutableEntry<>(pattern, val); Map.Entry<Pattern, String> replaced = replacements.put(key, entry); if (replaced != null) { return replaced.getValue(); } return null; } public String replace(String text) { if (text == null) { return null; } for (Map.Entry<Pattern, String> entry : replacements.values()) { text = entry.getKey().matcher(text).replaceAll(entry.getValue()); } // check for unregistered replacements Matcher matcher = ACFPatterns.REPLACEMENT_PATTERN.matcher(text); while (matcher.find()) { this.manager.log(LogLevel.ERROR, "Found unregistered replacement: " + matcher.group()); } return text; } }
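The CommandReplacements class above compiles, for each registered key, a case-insensitive pattern that matches both %key (word-bounded) and %{key}. The self-contained sketch below shows that pattern's behaviour in isolation; the key, value and sample text are invented, and in real use the pattern is built and applied internally by replace(String).

// Minimal sketch of the pattern constructed by addReplacement0(...) above, shown standalone.
import java.util.regex.Pattern;

public class ReplacementPatternDemo {
    public static void main(String[] args) {
        String key = "adminperm";                       // hypothetical replacement key
        String val = "myplugin.admin";                  // hypothetical replacement value
        Pattern p = Pattern.compile(
                "%\\{" + Pattern.quote(key) + "}|%" + Pattern.quote(key) + "\\b",
                Pattern.CASE_INSENSITIVE);
        String text = "perm: %adminperm, braced: %{AdminPerm}, not a key: %adminpermission";
        System.out.println(p.matcher(text).replaceAll(val));
        // prints: perm: myplugin.admin, braced: myplugin.admin, not a key: %adminpermission
        // (the trailing \b keeps the longer, unregistered token untouched)
    }
}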
1,247
707
<filename>wpilibcExamples/src/main/cpp/examples/RomiReference/include/commands/DriveDistance.h // Copyright (c) FIRST and other WPILib contributors. // Open Source Software; you can modify and/or share it under the terms of // the WPILib BSD license file in the root directory of this project. #pragma once #include <frc2/command/CommandBase.h> #include <frc2/command/CommandHelper.h> #include <units/length.h> #include "subsystems/Drivetrain.h" class DriveDistance : public frc2::CommandHelper<frc2::CommandBase, DriveDistance> { public: DriveDistance(double speed, units::meter_t distance, Drivetrain* drive) : m_speed(speed), m_distance(distance), m_drive(drive) { AddRequirements({m_drive}); } void Initialize() override; void Execute() override; void End(bool interrupted) override; bool IsFinished() override; private: double m_speed; units::meter_t m_distance; Drivetrain* m_drive; };
302
422
#include <iostream> #include <cstdlib> #include <unistd.h> #include <fstream> #include <string> #include <sstream> using namespace std; const int gridSize = 25; void printGrid(bool gridOne[gridSize+1][gridSize+1]); void determineState(bool gridOne[gridSize+1][gridSize+1]); void clearScreen(void); int main() { system( "color A" ); bool gridOne[gridSize+1][gridSize+1] = {}; int x,y,n; string nc; string start; string filename; cout << " THE GAME OF life - Implementation in C++" << endl; cout << endl; cout << endl; cout << endl; cout << "Also known simply as life, " << endl; cout << "is a cellular automaton devised by the British mathematician <NAME> in 1970." << endl; cout << endl; cout << "Rules" << endl; cout << "The universe of the Game of life is an infinite two-dimensional orthogonal grid of square cells," << endl; cout << "each of which is in one of two possible states, life or dead. Every" << endl; cout << "cell interacts with its eight neighbours, which are the cells that are horizontally, vertically, or diagonally adjacent." << endl; cout << "At each step in time, the following transitions occur:" << endl; cout << "1. Any live cell with fewer than two live neighbours dies, as if caused by under-population." << endl; cout << "2. Any live cell with two or three live neighbours lives on to the next generation." << endl; cout << "3. Any live cell with more than three live neighbours dies, as if by over-population." << endl; cout << "4. Any dead cell with exactly three live neighbours becomes a live cell, as if by reproduction." << endl; cout << endl; cout << "O - living cell" << endl; cout << ". - dead cell" << endl; cout << endl; cout << "Enter the number of cells, or 'r' to read cells from file: "; cin >> nc; cout << endl; if ( nc == "r" ) { while (true) { cout << "Enter name of file to read from: "<<endl; cin >> filename; ifstream readfile(filename); if ( readfile.is_open() ) { string fileline,xx,yy; while (getline(readfile,fileline)) { stringstream ss(fileline); getline(ss,xx,' '); getline(ss,yy,' '); x = stoi(xx); y = stoi(yy); gridOne[x][y] = true; } break; } else { cout << "No such file, try again." << endl; } } } else { for(int i=0;i<stoi(nc);i++) { cout <<stoi(nc)<< "Enter the coordinates of cell " << i+1 << " : "; cin >> x >> y; gridOne[x][y] = true; printGrid(gridOne); } } cout << "Grid setup is done. Start the game ? (y/n)" << endl; printGrid(gridOne); cin >> start; if( start == "y" || start == "Y" ) { while (true) { printGrid(gridOne); determineState(gridOne); usleep(200000); clearScreen(); } } else { return 0; } } void clearScreen(void) { // Tested and working on Ubuntu and Cygwin #if defined(_WIN32) || defined(WIN32) || defined(__MINGW32__) || defined(__BORLANDC__) #define OS_WIN #endif #ifdef OS_WIN system("CLS"); #endif #if defined(linux) || defined(__CYGWIN__) system("clear"); #endif } void printGrid(bool gridOne[gridSize+1][gridSize+1]){ for(int a = 1; a < gridSize; a++) { for(int b = 1; b < gridSize; b++) { if(gridOne[a][b] == true) { cout << " O "; } else { cout << " . 
"; } if(b == gridSize-1) { cout << endl; } } } } void compareGrid (bool gridOne[gridSize+1][gridSize+1], bool gridTwo[gridSize+1][gridSize+1]){ for(int a =0; a < gridSize; a++) { for(int b = 0; b < gridSize; b++) { gridTwo[a][b] = gridOne[a][b]; } } } void determineState(bool gridOne[gridSize+1][gridSize+1]){ bool gridTwo[gridSize+1][gridSize+1] = {}; compareGrid(gridOne, gridTwo); for(int a = 1; a < gridSize; a++) { for(int b = 1; b < gridSize; b++) { int alive = 0; for(int c = -1; c < 2; c++) { for(int d = -1; d < 2; d++) { if(!(c == 0 && d == 0)) { if(gridTwo[a+c][b+d]) { ++alive; } } } } if(alive < 2) { gridOne[a][b] = false; } else if(alive == 3) { gridOne[a][b] = true; } else if(alive > 3) { gridOne[a][b] = false; } } } }
2,413
852
<gh_stars>100-1000 #ifndef DataFormats_L1Trigger_HOTPDigiTwinMux_h #define DataFormats_L1Trigger_HOTPDigiTwinMux_h #include <ostream> #include "DataFormats/HcalDetId/interface/HcalDetId.h" #include <cstdint> /** \class HOTPDigiTwinMux * Simple container packer/unpacker for HO TriggerPrimittive in TwinMUX * Trigger Primitive from HO HTR * * \author Saxena, Pooja - DESY */ class HOTPDigiTwinMux { public: typedef HcalDetId key_type; /// For the sorted collection HOTPDigiTwinMux() { theTP_HO = 0; } HOTPDigiTwinMux(uint64_t data) { theTP_HO = data; } /// //////////////////////////// /// Summary of the bits /// //////////////////////////// /// raw ieta value = theTP_HO &0 0x1F /// sign of ieta (int: +/- 1) = (theTP_HO &0 0x10)?(-1):(+1)) /// absolute value of ieta = (theTP_HO &0 0x000F) /// raw iphi value = (theTP_HO>>5) &0 0x007F; /// bx() = (theTP_HO>>12) &0 0x1; /// bx signn = ( ( (theTP_HO>>13) &0 0x1) ?(-1):(+1)); /// mip value = (theTP_HO>>14) &0 0x1; /// valid bit = (theTP_HO>>15) &0 0x1; /// raw wheel value = (theTP_HO>>16) &0 0x7; /// sign of wheel (int: +/- 1) = ( ( (theTP_HO>>18) &0 0x1) ?(-1):(+1)); /// absolute value of wheel = (theTP_HO>>16) &0 0x03; /// sector value = (theTP_HO>>19) &0 0xF; /// index = (theTP_HO>>23) &0 0x1F; /// link value = (theTP_HO>>28) &0 0x3; HOTPDigiTwinMux(int ieta, int iphi, int bx, int mip, int validbit, int wheel, int sector, int index, int link); const HcalDetId id() const { return HcalDetId(HcalOuter, ieta(), iphi(), 4); } /// get raw packed HO uint64_t raw() const { return theTP_HO; } /// get the raw ieta value int raw_ieta() const { return theTP_HO & 0x1F; } /// get the sign of ieta (int: +/- 1) int ieta_sign() const { return ((theTP_HO & 0x10) ? (-1) : (+1)); } /// get the absolute value of ieta int ieta_abs() const { return (theTP_HO & 0x000F); } /// get the signed ieta value int ieta() const { return (ieta_abs() * ieta_sign()); } /// get the raw iphi value int iphi() const { return (theTP_HO >> 5) & 0x007F; } /// get the bx() int bx_abs() const { return (theTP_HO >> 12) & 0x1; } /// get the bx sign // int bx_sign() const {return ( ( (theTP_HO>>13)&0x2000) ?(-1):(+1)); } int bx_sign() const { return (((theTP_HO >> 13) & 0x1) ? (-1) : (+1)); } //get bx int bx() const { return (bx_abs() * bx_sign()); } /// get the mip value int mip() const { return (theTP_HO >> 14) & 0x1; } /// get the valid bit int validbit() const { return (theTP_HO >> 15) & 0x1; } //MIP consistency check with HO FEDs /// get the raw wheel value int raw_wheel() const { return (theTP_HO >> 16) & 0x7; } /// get the sign of wheel (int: +/- 1) int wheel_sign() const { return (((theTP_HO >> 18) & 0x1) ? (-1) : (+1)); } /// get the absolute value of wheel int wheel_abs() const { return (theTP_HO >> 16) & 0x03; } /// get the signed wheel value int wheel() const { return (wheel_abs() * wheel_sign()); } /// get the sector value int sector() const { return (theTP_HO >> 19) & 0xF; } /// get the index int index() const { return (theTP_HO >> 23) & 0x1F; } //channel index in Twinmux protocal /// get the link value int link() const { return (theTP_HO >> 28) & 0x3; } //two link for all HO wheels static const int HO_SECTOR_MAX = 12; private: uint64_t theTP_HO; }; std::ostream& operator<<(std::ostream&, const HOTPDigiTwinMux&); #endif
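The HOTPDigiTwinMux header above documents how the HO trigger-primitive word packs ieta, iphi, bx, mip, valid bit, wheel, sector, index and link into one 64-bit value. The Java sketch below replays that layout on an invented word so the shifts and masks can be checked in isolation; only the bit positions come from the accessors above, everything else is illustrative.

// Illustrative pack/unpack of the bit layout documented in HOTPDigiTwinMux above.
public class HoTpBitsDemo {
    public static void main(String[] args) {
        // Pack ieta=-3, iphi=40, wheel=+2, sector=7, index=5, link=1, mip=1, valid=1
        // (the bx bits 12-13 are left at zero).
        long w = 0;
        w |= (3 & 0xF) | 0x10;        // |ieta|=3 in bits 0-3, sign bit 4 set -> negative
        w |= (40L & 0x7F) << 5;       // iphi in bits 5-11
        w |= 1L << 14;                // mip flag
        w |= 1L << 15;                // valid bit
        w |= (2L & 0x3) << 16;        // |wheel|=2 in bits 16-17, sign bit 18 clear -> positive
        w |= (7L & 0xF) << 19;        // sector in bits 19-22
        w |= (5L & 0x1F) << 23;       // index in bits 23-27
        w |= (1L & 0x3) << 28;        // link in bits 28-29

        // Unpack with the same expressions the C++ accessors use.
        int ieta   = (int) (w & 0x0F) * (((w & 0x10) != 0) ? -1 : 1);
        int iphi   = (int) ((w >> 5) & 0x7F);
        int mip    = (int) ((w >> 14) & 0x1);
        int valid  = (int) ((w >> 15) & 0x1);
        int wheel  = (int) ((w >> 16) & 0x3) * ((((w >> 18) & 0x1) != 0) ? -1 : 1);
        int sector = (int) ((w >> 19) & 0xF);
        int index  = (int) ((w >> 23) & 0x1F);
        int link   = (int) ((w >> 28) & 0x3);
        System.out.printf("ieta=%d iphi=%d mip=%d valid=%d wheel=%d sector=%d index=%d link=%d%n",
                ieta, iphi, mip, valid, wheel, sector, index, link);
        // prints: ieta=-3 iphi=40 mip=1 valid=1 wheel=2 sector=7 index=5 link=1
    }
}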
1,383
454
from gazette.spiders.base.fecam import FecamGazetteSpider class ScPrincesaSpider(FecamGazetteSpider): name = "sc_princesa" FECAM_QUERY = "cod_entidade:213" TERRITORY_ID = "4214151"
87
1,127
<filename>src/plugins/intel_gpu/src/kernel_selector/core/actual_kernels/roll/roll_kernel_ref.hpp // Copyright (C) 2022 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // #pragma once #include "kernel_base_opencl.h" namespace kernel_selector { struct roll_params : base_params { roll_params() : base_params(KernelType::ROLL) {} DimTensor<> shift; }; struct roll_optional_params : optional_params { roll_optional_params() : optional_params(KernelType::ROLL) {} }; class RollKernelRef : public KernelBaseOpenCL { public: RollKernelRef() : KernelBaseOpenCL{"roll_ref"} {} KernelsData GetKernelsData(const Params& params, const optional_params& options) const override; ParamsKey GetSupportedKey() const override; protected: bool Validate(const Params& params, const optional_params& options) const override; JitConstants GetJitConstants(const roll_params& kernel_params) const; }; } // namespace kernel_selector
318
839
<filename>rt/ws/policy/src/main/java/org/apache/cxf/ws/policy/PolicyVerificationOutInterceptor.java /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.ws.policy; import java.util.logging.Logger; import org.apache.cxf.common.logging.LogUtils; import org.apache.cxf.message.Message; import org.apache.cxf.message.MessageUtils; import org.apache.cxf.phase.Phase; /** * */ public class PolicyVerificationOutInterceptor extends AbstractPolicyInterceptor { public static final PolicyVerificationOutInterceptor INSTANCE = new PolicyVerificationOutInterceptor(); private static final Logger LOG = LogUtils.getL7dLogger(PolicyVerificationOutInterceptor.class); public PolicyVerificationOutInterceptor() { super(Phase.POST_STREAM); } /** * Checks if all assertions in the chosen alternative have been asserted. * Note that although the alternative was chosen in such a way that at least all * interceptors necessary to assert the assertions are present, it is not possible * to predict if these interceptors actually have asserted their assertions. * @param message * @throws PolicyException if none of the alternatives is supported */ protected void handle(Message message) { if (MessageUtils.isPartialResponse(message)) { LOG.fine("Not verifying policies on outbound partial response."); return; } AssertionInfoMap aim = message.get(AssertionInfoMap.class); if (null == aim) { return; } getTransportAssertions(message); EffectivePolicy policy = message.get(EffectivePolicy.class); if (policy == null) { return; } // CXF-1849 Log a message at FINE level if policy verification fails // on the outbound-server side of a response try { aim.checkEffectivePolicy(policy.getPolicy()); } catch (PolicyException e) { LOG.fine("An exception was thrown when verifying that the effective policy for " + "this request was satisfied. However, this exception will not result in " + "a fault. The exception raised is: " + e.toString()); return; } LOG.fine("Verified policies for outbound message."); } }
1,042
416
// // VNTypes.h // Vision // // Copyright © 2017 Apple Inc. All rights reserved. // #import <Foundation/Foundation.h> #import <Vision/VNDefines.h> typedef float VNConfidence; typedef float VNAspectRatio; typedef float VNDegrees; typedef NS_ENUM(NSUInteger, VNImageCropAndScaleOption) { VNImageCropAndScaleOptionCenterCrop = 0, // scale image maintaining aspect ratio to fit on the short side and crop centered on the long side VNImageCropAndScaleOptionScaleFit = 1, // scale to size required by algorithm VNImageCropAndScaleOptionScaleFill }; /*! @brief Barcode symbologies that are supported by the Vision framework. @discussion The actual set of barcode symbologies that can actually be recognized by a specific version of the Vision framework should be determined by using the VNRequestNameSupportedBarcodeSymbologies request. */ typedef NSString *VNBarcodeSymbology NS_STRING_ENUM; VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyAztec API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyCode39 API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyCode39Checksum API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyCode39FullASCII API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyCode39FullASCIIChecksum API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyCode93 API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyCode93i API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyCode128 API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyDataMatrix API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyEAN8 API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyEAN13 API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyI2of5 API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyI2of5Checksum API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyITF14 API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyPDF417 API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyQR API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)); VN_EXPORT VNBarcodeSymbology const VNBarcodeSymbologyUPCE API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0));
1,183
2,151
<filename>recipes/recipe_modules/infra_paths/api.py # Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from recipe_engine import recipe_api class InfraPathsApi(recipe_api.RecipeApi): """infra_paths module is glue for design mistakes. It will be removed.""" def initialize(self): path_config = self.m.properties.get('path_config') if path_config: # TODO(phajdan.jr): remove dupes from the engine and delete infra_ prefix. self.m.path.set_config('infra_' + path_config) @property def default_git_cache_dir(self): """Returns the location of the default git cache directory. This property should be used instead of using path['git_cache'] directly. It returns git_cache path if it is defined (Buildbot world), otherwise uses the more generic [CACHE]/git path (LUCI world). """ try: return self.m.path['git_cache'] except KeyError: return self.m.path['cache'].join('git')
353
3,097
// // YPRealPartitionModel.h // Wuxianda // // Created by MichaelPPP on 16/7/11. // Copyright © 2016年 michaelhuyp. All rights reserved. // 分区数据模型 #import <Foundation/Foundation.h> #import "YPRealPartitionSubIconModel.h" @interface YPRealPartitionModel : NSObject /** id */ @property (nonatomic, copy) NSString *idStr; /** 名字 */ @property (nonatomic, copy) NSString *name; /** 分类 */ @property (nonatomic, copy) NSString *area; /** 分区直播数 */ @property (nonatomic, copy) NSString *count; /** 图片模型 */ @property (nonatomic, strong) YPRealPartitionSubIconModel *sub_icon; @end
252
11,719
// Copyright (C) 2004 <NAME> (<EMAIL>) // License: Boost Software License See LICENSE.txt for the full license. #undef DLIB_ENTROPY_ENCODER_MODEL_KERNEl_ABSTRACT_ #ifdef DLIB_ENTROPY_ENCODER_MODEL_KERNEl_ABSTRACT_ #include "../algs.h" namespace dlib { template < unsigned long alphabet_size, typename entropy_encoder > class entropy_encoder_model { /*! REQUIREMENTS ON alphabet_size 1 < alphabet_size < 65535 REQUIREMENTS ON entropy_encoder is an implementation of entropy_encoder/entropy_encoder_kernel_abstract.h INITIAL VALUE Initially this object is at some predefined empty or ground state. WHAT THIS OBJECT REPRESENTS This object represents some kind of statistical model. You can use it to write symbols to an entropy_encoder and it will calculate the cumulative counts/probabilities and manage contexts for you. Note that all implementations of entropy_encoder_model and entropy_decoder_model are paired. This means that if you use entropy_encoder_model_kernel_n to encode something then you must use the corresponding entropy_decoder_model_kernel_n to decode it. Also note that this object does not perform any buffering of symbols. It writes them to its associated entropy_encoder immediately. This makes it safe to use multiple entropy_encoder_model objects with a single entropy_encoder without them trampling each other. !*/ public: typedef entropy_encoder entropy_encoder_type; entropy_encoder_model ( entropy_encoder& coder ); /*! ensures - #*this is properly initialized - &#get_entropy_encoder() == &coder throws - any exception !*/ virtual ~entropy_encoder_model ( ); /*! ensures - all memory associated with *this has been released !*/ void clear( ); /*! ensures - #*this has its initial value - does not modify get_entropy_encoder() throws - any exception if this exception is thrown then *this is unusable until clear() is called and succeeds !*/ void encode ( unsigned long symbol ); /*! requires - symbol < alphabet_size ensures - encodes and writes the symbol to get_entropy_encoder(). This also means that there is no internal buffering. symbol is written immediately to the entropy_encoder. throws - any exception If this exception is thrown then #*this is unusable until clear() is called and succeeds. !*/ entropy_encoder& get_entropy_encoder ( ); /*! ensures - returns a reference to the entropy_encoder used by *this !*/ static unsigned long get_alphabet_size ( ); /*! ensures - returns alphabet_size !*/ private: // restricted functions entropy_encoder_model(entropy_encoder_model<alphabet_size,entropy_encoder>&); // copy constructor entropy_encoder_model<alphabet_size,entropy_encoder>& operator=(entropy_encoder_model<alphabet_size,entropy_encoder>&); // assignment operator }; } #endif // DLIB_ENTROPY_ENCODER_MODEL_KERNEl_ABSTRACT_
1,781
434
<reponame>zhiminghufighting/inclavare-containers /* Copyright (c) 2021 Intel Corporation * Copyright (c) 2020-2021 Alibaba Cloud * * SPDX-License-Identifier: Apache-2.0 */ #include <enclave-tls/log.h> #include <enclave-tls/crypto_wrapper.h> crypto_wrapper_err_t nullcrypto_gen_pubkey_hash(crypto_wrapper_ctx_t *ctx, enclave_tls_cert_algo_t algo, uint8_t *hash) { ETLS_DEBUG("ctx %p, algo %d, hash %p\n", ctx, algo, hash); return CRYPTO_WRAPPER_ERR_NONE; }
203
571
<gh_stars>100-1000 package com.univocity.trader.indicators; import com.univocity.trader.candles.Aggregator; import com.univocity.trader.candles.Candle; import com.univocity.trader.indicators.base.Statistic; import com.univocity.trader.indicators.base.TimeInterval; import com.univocity.trader.strategy.Indicator; import com.univocity.trader.utils.CircularList; import java.util.function.ToDoubleFunction; import static com.univocity.trader.indicators.base.AggregatedTicksIndicator.getAggregator; public class PearsonCorrelation extends Statistic { private CircularList l1; private CircularList l2; private Indicator indicator1; private Indicator indicator2; private Aggregator aggregator1; private Aggregator aggregator2; public PearsonCorrelation(int length, TimeInterval interval, ToDoubleFunction<Candle> indicator1, ToDoubleFunction<Candle> indicator2) { super(length, interval, indicator1, indicator2); } public PearsonCorrelation(int length, Indicator indicator1, ToDoubleFunction<Candle> indicator2) { super(length, indicator1, indicator2); } public PearsonCorrelation(int length, ToDoubleFunction<Candle> indicator1, Indicator indicator2) { super(length, indicator1, indicator2); } public PearsonCorrelation(int length, Indicator indicator1, Indicator indicator2) { super(length, indicator1, indicator2); } @Override protected void initialize(Indicator indicator1, Indicator indicator2) { this.l1 = new CircularList(length); this.l2 = new CircularList(length); this.indicator1 = indicator1; this.indicator2 = indicator2; aggregator1 = getAggregator(indicator1); aggregator2 = getAggregator(indicator2); } @Override protected boolean indicatorsAccumulated(Candle candle) { return indicator1.accumulate(candle) | indicator2.accumulate(candle); } @Override protected double calculate() { l1.accumulate(indicator1.getValue(), aggregator1 != null && aggregator1.getPartial() != null); l2.accumulate(indicator2.getValue(), aggregator2 != null && aggregator2.getPartial() != null); int from1 = l1.getStartingIndex(); int from2 = l2.getStartingIndex(); int c = Math.min(l1.size(), l2.size()); double Sx = 0; double Sy = 0; double Sxx = 0; double Syy = 0; double Sxy = 0; while (c-- > 0) { double x = l1.get(from1); double y = l2.get(from2); Sx += x; Sy += y; Sxy += x * y; Sxx += x * x; Syy += y * y; from1 = (from1 + 1) % length; from2 = (from2 + 1) % length; } double n = l1.capacity(); double toSqrt = ((n * Sxx) - (Sx * Sx)) * ((n * Syy) - (Sy * Sy)); if(toSqrt > 0) { return ((n * Sxy) - (Sx * Sy)) / (Math.sqrt(toSqrt)); } return Double.NaN; } }
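calculate() in the PearsonCorrelation class above evaluates Pearson's r from running sums as r = (n*Sxy - Sx*Sy) / sqrt((n*Sxx - Sx^2) * (n*Syy - Sy^2)). The self-contained sketch below applies the same formula to two invented series with a full window, which is the case where using the window capacity as n is exact.

// Standalone check of the running-sum form of Pearson's r used by calculate() above.
public class PearsonDemo {
    static double pearson(double[] x, double[] y) {
        int n = x.length;
        double sx = 0, sy = 0, sxx = 0, syy = 0, sxy = 0;
        for (int i = 0; i < n; i++) {
            sx  += x[i];
            sy  += y[i];
            sxx += x[i] * x[i];
            syy += y[i] * y[i];
            sxy += x[i] * y[i];
        }
        double toSqrt = (n * sxx - sx * sx) * (n * syy - sy * sy);
        return toSqrt > 0 ? (n * sxy - sx * sy) / Math.sqrt(toSqrt) : Double.NaN;
    }

    public static void main(String[] args) {
        double[] x = {1, 2, 3, 4, 5};
        double[] y = {2, 4, 6, 8, 10};          // perfectly correlated with x
        double[] z = {5, 4, 3, 2, 1};           // perfectly anti-correlated with x
        System.out.println(pearson(x, y));      // 1.0 (up to floating-point error)
        System.out.println(pearson(x, z));      // -1.0
    }
}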
1,245
2,561
<filename>Options Straddle backtest.py # coding: utf-8 # In[1]: #after a long while of struggle, i finally decided to write something on options strategy #the biggest issue of options trading is to find the backtesting data #the most difficult part is options greeks #after all, data is the new black gold #here are a couple of websites u can try your luck #currently they offer free trial for a limited period # http://base2.optionsdatamine.com/page.php # https://www.historicaloptiondata.com/ #in order to save u guys from the hassle, I also include a small dataset of stoxx 50 index #the dataset has 3 spreadsheets, the spot spreadsheet refers to spot price of stoxx 50 #aug spreadsheet refers to options settle at august 2019 #jul spreadsheet refers to options settle at july 2019 # https://github.com/je-suis-tm/quant-trading/tree/master/data #if you dont know what options straddle is #i recommend u to read a tutorial from fidelity #who else can explain the concept of options than one of the largest mutual funds # https://www.fidelity.com/learning-center/investment-products/options/options-strategy-guide/long-straddle #in simple words, options are a financial derivative #that enables u to trade underlying asset at certain price in the future #and options straddle enable you to profit from a certain level of volatility #in this script, we are only gonna talk about long straddle #basically long straddle implies buy call option and put option of same strike price and same strike date #preferably at the same option price as well #otherwise asymmetric option price means there is more one-sided risk than the other #you may wanna consider strangle or strap/strip in this case #short straddle is literally shorting call option and put option of the same strike price and the same strike date #preferably at the same option price as well #long straddle has unlimited profit for upside movement and limited loss #short straddle has unlimited loss for upside movement and limited profit #short straddle is commonly used in a sideway market #long straddle is commonly used in event driven strategy #for instance, brexit on 30th of October 2019, its do or die, no ifs and buts #if bojo delivers a no-deal Brexit, uk sterling gonna sink #or he secures a new deal without backstop from macron and merkel #even though unlikely, uk sterling gonna spike #or he has to postpone and look like an idiot, uk sterling still gonna surge #either way, there will be a lot of volatility around that particular date #to secure a profit from either direction, that is when options straddle kick in #but hey, options are 3 dimensional #apart from strike date, option price, which strike price should we pick #well, that is a one million us dollar question #who says quantitative trading is about algos and calculus? 
#this is when u need to consult with some economists to get a base case #their fundamental analysis will determine your best/worst scenario #therefore, u can pick a good strike price to maximize your profit #or the simplest way is to find a strike price closer to the current spot price #nevertheless, as u can see in our stoxx 50 dataset #not all strike price offer both call and put options #and even if they offer both, the price of options may be very different #there could be more upside/downside from the market consensus #we can pick the options which offer both call and put options #and we only trade when both option prices are converging #and please don’t arrogantly believe that you outsmart the rest of the players in the market #all the information you have obtained from any tips may have already been priced in #finding a good pair of call and put options at the same strike price, #the same strike date and almost the same price is tough #to make our life easier, we only consider european options with cash settlement in this script import os os.chdir('d:/') import pandas as pd import numpy as np import matplotlib.pyplot as plt import re # In[2]: #as we have gathered all the available call and put options #this function will only extract strike price existing in both call and put options #this is a fundamental requirement of options straddle def find_strike_price(df): temp=[re.search('\d{4}',i).group() for i in df.columns] target=[] for i in set(temp): if temp.count(i)>1: target.append(i) return target # In[3]: #this function is merely data cleansing #merging option price information with spot price def straddle(options,spot,contractsize,strikeprice): option=options[[i for i in options.columns if strikeprice in i]] df=pd.merge(spot,option,left_index=True,right_index=True) temp=[] for i in df.columns: if 'C'+strikeprice in i: temp.append('call') elif 'P'+strikeprice in i: temp.append('put') elif 'Index' in i: temp.append('spot') else: temp.append(i) df.columns=temp #we multiply contract size with spot price here #it makes our life a lot easier later with visualization df['spot']=df['spot'].apply(lambda x:x*contractsize) return df # In[4]: #signal generation is actually very simple #just find the option pair at the closest price we can def signal_generation(df,threshold): df['signals']=np.where( np.abs( df['call']-df['put'])<threshold, 1,0) return df # In[5]: #ploting the payoff diagram def plot(df,strikeprice,contractsize): #finding trading signal #if no signal is found #we declare no suitable entry point for options straddle ind=df[df['signals']!=0].index if ind.empty: print('Strike Price at',strikeprice,'\nNo trades available.\n') return #calculate how much profit we can gain outta this profit=np.abs( df['spot'].iloc[-1]-int(strikeprice)*contractsize )-df['call'][ind[0]]-df['put'][ind[0]] y=[] #we use these two variables to plot how much we can profit at different spot price begin=round(int(strikeprice)*contractsize-5*(df['call'][ind[0]]+df['put'][ind[0]]),0) end=round(int(strikeprice)*contractsize+5*(df['call'][ind[0]]+df['put'][ind[0]]),0)+1 x=list(np.arange(int(begin),int(end))) #as u can see from the pic # https://github.com/je-suis-tm/quant-trading/blob/master/preview/options%20straddle%20payoff%20diagram.png #we only make money (green color) if the spot price is outside of a range #group1 and group2 are variables that indicate which range our line plot gets red/green color #they keep track of the indices that we switch from profit to loss or from loss to profit #as indices are 
always positive, we initialize them to negative values group1,group2=-10,-10 for j in x: temp=np.abs(j-int(strikeprice)*contractsize)-(df['call'][ind[0]]+df['put'][ind[0]]) y.append(temp) if temp<0 and group1<0: group1=x.index(j) if temp>0 and group1>0 and group2<0: group2=x.index(j) ax=plt.figure(figsize=(10,5)).add_subplot(111) ax.spines['bottom'].set_position(('data',0)) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) #pnl in different colors, red is loss, green is profit plt.plot(x[:group1],y[:group1],c='#57bc90',lw=5) plt.plot(x[group2:],y[group2:],c='#57bc90',lw=5) plt.plot(x[group1:group2],y[group1:group2],c='#ec576b',lw=5) #ploting strike price plt.plot([int(strikeprice)*contractsize, int(strikeprice)*contractsize], [0,-(df['call'][ind[0]]+df['put'][ind[0]])], linestyle=':',lw=3,c='#ec576b',alpha=0.5) #ploting spot price plt.axvline(df['spot'].iloc[-1],lw=5, linestyle='--',c='#e5e338',alpha=0.5) #adding annotations plt.annotate('Strike Price', xy=(int(strikeprice)*contractsize, 0), xytext=(int(strikeprice)*contractsize, df['call'][ind[0]]+df['put'][ind[0]]), arrowprops=dict(arrowstyle='simple', facecolor='#c5c1c0',), va='center',ha='center' ) plt.annotate('Lower Breakeven Point', xy=(int(strikeprice)*contractsize-(df['call'][ind[0]]+df['put'][ind[0]]), 0), xytext=(int(strikeprice)*contractsize-1.5*(df['call'][ind[0]]+df['put'][ind[0]]), -df['call'][ind[0]]-df['put'][ind[0]]), arrowprops=dict(arrowstyle='simple', facecolor='#c5c1c0'), va='center',ha='center' ) plt.annotate('Upper Breakeven Point', xy=(int(strikeprice)*contractsize+(df['call'][ind[0]]+df['put'][ind[0]]), 0), xytext=(int(strikeprice)*contractsize+1.5*(df['call'][ind[0]]+df['put'][ind[0]]), -df['call'][ind[0]]-df['put'][ind[0]]), arrowprops=dict(arrowstyle='simple', facecolor='#c5c1c0'), va='center',ha='center' ) plt.annotate('Spot Price', xy=(df['spot'].iloc[-1], 2*(df['call'][ind[0]]+df['put'][ind[0]])), xytext=(df['spot'].iloc[-1]*1.003, 2*(df['call'][ind[0]]+df['put'][ind[0]])), arrowprops=dict(arrowstyle='simple', facecolor='#c5c1c0'), va='center',ha='left' ) #limit x ticks to 3 for a tidy look plt.locator_params(axis='x',nbins=3) plt.title(f'Long Straddle Options Strategy\nP&L {round(profit,2)}') plt.ylabel('Profit & Loss') plt.xlabel('Price',labelpad=50) plt.show() # In[6]: #for stoxx 50 options, the contract size is 10 ticks per euro contractsize=10 #the threshold determines the price disparity between call and put options #the same call and put option price for the same strike price and the same strike date #only exists in an ideal world, in reality, it is like royal flush #when the price difference of call and put is smaller than 2 euros #we consider them identically the same option price threshold=2 # In[7]: def main(): data=pd.ExcelFile('stoxx50.xlsx') aug=data.parse('aug') aug.set_index('Dates',inplace=True) aug.index=pd.to_datetime(aug.index) spot=data.parse('spot') spot.set_index('Dates',inplace=True) spot.index=pd.to_datetime(spot.index) target=find_strike_price(aug) #we iterate through all the available option pairs #to find the optimal strike price to maximize our profit for strikeprice in target: df=straddle(aug,spot,contractsize,strikeprice) signal=signal_generation(df,threshold) plot(signal,strikeprice,contractsize) # In[8]: if __name__ == '__main__': main()
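The script above prices a long straddle's expiry P&L as |spot - strike*contractsize| minus the two premiums, with breakeven points at strike*contractsize +/- (call + put). The worked example below runs that arithmetic on invented numbers; it does not use the STOXX 50 dataset and is written in Java purely as a standalone illustration of the formula.

// Worked example of the long-straddle payoff arithmetic used in the script above.
public class StraddlePayoffDemo {
    public static void main(String[] args) {
        double contractSize = 10;          // same contract size the script assumes
        double strike = 3500;              // hypothetical strike
        double call = 45, put = 47;        // hypothetical premiums, within the 2 EUR threshold
        double breakEvenLow  = strike * contractSize - (call + put);
        double breakEvenHigh = strike * contractSize + (call + put);
        System.out.println("breakevens: " + breakEvenLow + " .. " + breakEvenHigh);
        for (double spot : new double[]{34500, 35000, 35092, 36000}) {
            double pnl = Math.abs(spot - strike * contractSize) - call - put;
            System.out.println("spot " + spot + " -> P&L " + pnl);
        }
        // values printed: 34500 -> 408, 35000 -> -92 (max loss), 35092 -> 0 (breakeven), 36000 -> 908
    }
}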
4,445
1,338
/* * Copyright 2005, Haiku, Inc. All Rights Reserved. * Distributed under the terms of the MIT License. */ #ifndef _FILE_PANEL_H #define _FILE_PANEL_H #include <Directory.h> #include <Entry.h> #include <Node.h> class BMessage; class BMessenger; class BWindow; struct stat; struct stat_beos; class BRefFilter { public: #if __GNUC__ > 2 virtual ~BRefFilter() {}; #endif virtual bool Filter(const entry_ref* ref, BNode* node, struct stat_beos* stat, const char* mimeType) = 0; }; enum file_panel_mode { B_OPEN_PANEL, B_SAVE_PANEL }; enum file_panel_button { B_CANCEL_BUTTON, B_DEFAULT_BUTTON }; class BFilePanel { public: BFilePanel(file_panel_mode mode = B_OPEN_PANEL, BMessenger* target = NULL, const entry_ref* directory = NULL, uint32 nodeFlavors = 0, bool allowMultipleSelection = true, BMessage* message = NULL, BRefFilter* refFilter = NULL, bool modal = false, bool hideWhenDone = true); virtual ~BFilePanel(); void Show(); void Hide(); bool IsShowing() const; virtual void WasHidden(); virtual void SelectionChanged(); virtual void SendMessage(const BMessenger* target, BMessage* message); BWindow* Window() const; BMessenger Messenger() const; BRefFilter* RefFilter() const; file_panel_mode PanelMode() const; void SetTarget(BMessenger target); void SetMessage(BMessage* message); void SetRefFilter(BRefFilter* filter); void SetSaveText(const char* text); void SetButtonLabel(file_panel_button button, const char* label); void SetNodeFlavors(uint32 flavors); void SetPanelDirectory(const BEntry* newDirectory); void SetPanelDirectory(const BDirectory* newDirectory); void SetPanelDirectory(const entry_ref* newDirectory); void SetPanelDirectory(const char* newDirectory); void GetPanelDirectory(entry_ref* ref) const; void SetHideWhenDone(bool hideWhenDone); bool HidesWhenDone() const; void Refresh(); void Rewind(); status_t GetNextSelectedRef(entry_ref* ref); private: virtual void _ReservedFilePanel1(); virtual void _ReservedFilePanel2(); virtual void _ReservedFilePanel3(); virtual void _ReservedFilePanel4(); virtual void _ReservedFilePanel5(); virtual void _ReservedFilePanel6(); virtual void _ReservedFilePanel7(); virtual void _ReservedFilePanel8(); BWindow* fWindow; uint32 _reserved[10]; }; #endif /* _FILE_PANEL_H */
928
571
<filename>ufora/FORA/CUDA/GpuCodegen_test.py # Copyright 2016 Ufora Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import ufora.FORA.python.FORA as Fora import ufora.native.FORA as ForaNative import pickle class GpuCodegenTest(unittest.TestCase): def test_basic_gpu_codegen(self): f = Fora.extractImplValContainer(Fora.eval("fun(e) { `LocalityHint((e,e+1)); e + 1 }")) v = Fora.extractImplValContainer(Fora.eval("[1,2,3,4]")) #this is a very weak way of testing this, but works as a first pass self.assertTrue("LocalityHint" in ForaNative.compileAndStringifyNativeCfgForGpu(f,v))
408
2,759
<gh_stars>1000+ /* * Copyright 2021 YugaByte, Inc. and Contributors * * Licensed under the Polyform Free Trial License 1.0.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://github.com/YugaByte/yugabyte-db/blob/master/licenses/POLYFORM-FREE-TRIAL-LICENSE-1.0.0.txt */ package com.yugabyte.yw.common; import static org.hamcrest.MatcherAssert.assertThat; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; public class ThrownMatcher extends TypeSafeMatcher<Runnable> { private static final String NOTHING = "nothing"; private final String expected; private final String expectedMessage; private final Matcher<String> messageAssertion; private String actual; private String actualMessage; public ThrownMatcher(String s, String expectedMessage, Matcher<String> messageAssertion) { expected = s; this.expectedMessage = expectedMessage; this.messageAssertion = messageAssertion; } public static Matcher<Runnable> thrown(Class<? extends Throwable> expected) { return new ThrownMatcher(expected.getName(), null, null); } public static Matcher<Runnable> thrown( Class<? extends Throwable> expected, Matcher<String> messageAssertion) { return new ThrownMatcher(expected.getName(), null, messageAssertion); } public static Matcher<Runnable> thrown( Class<? extends Throwable> expected, String expectedMessage) { return new ThrownMatcher(expected.getName(), expectedMessage, null); } @Override public boolean matchesSafely(Runnable action) { actual = NOTHING; actualMessage = NOTHING; try { action.run(); return false; } catch (Throwable t) { actual = t.getClass().getName(); actualMessage = t.getMessage(); return actual.equals(expected) && (expectedMessage == null || actualMessage.equals(expectedMessage)) && assertMessage(); } } private boolean assertMessage() { if (messageAssertion == null) { return true; } assertThat(actualMessage, messageAssertion); return true; } @Override public void describeTo(Description description) { if (!actual.equals(expected)) { description.appendText("throw " + expected); } else if (expectedMessage != null) { description.appendText("message '" + expectedMessage + "'"); } } @Override protected void describeMismatchSafely(Runnable item, Description mismatchDescription) { if (!actual.equals(expected)) { mismatchDescription.appendText("threw " + actual); } else if (expectedMessage != null) { mismatchDescription.appendText("message '" + actualMessage + "'"); } } }
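The ThrownMatcher above matches a Runnable that throws an exception of an exact class and, optionally, a specific message. Below is a hedged usage sketch; the demo class and the chosen failing action are illustrative, while the static imports follow the package declared in the file.

// Usage sketch for ThrownMatcher (illustrative only).
import static org.hamcrest.MatcherAssert.assertThat;
import static com.yugabyte.yw.common.ThrownMatcher.thrown;

public class ThrownMatcherDemo {
    public static void main(String[] args) {
        // Passes: the runnable throws exactly the expected exception type.
        assertThat(() -> Integer.parseInt("not-a-number"), thrown(NumberFormatException.class));

        // Would fail: the runnable throws nothing, so the mismatch reads "threw nothing".
        // assertThat(() -> { }, thrown(IllegalStateException.class));
    }
}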
909
703
<reponame>Tekh-ops/ezEngine #pragma once #include <EditorEngineProcessFramework/EditorEngineProcessFrameworkDLL.h> #include <ToolsFoundation/Object/DocumentObjectMirror.h> /// \brief An object mirror that mirrors across IPC to the engine process. /// /// One instance on the editor side needs to be initialized as sender and another /// one on the engine side as receiver. class EZ_EDITORENGINEPROCESSFRAMEWORK_DLL ezIPCObjectMirrorEngine : public ezDocumentObjectMirror { public: ezIPCObjectMirrorEngine(); ~ezIPCObjectMirrorEngine(); virtual void ApplyOp(ezObjectChange& change) override; };
184
2,180
/* * This file is part of the Heritrix web crawler (crawler.archive.org). * * Licensed to the Internet Archive (IA) by one or more individual * contributors. * * The IA licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.archive.crawler.selftest; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import org.archive.crawler.frontier.precedence.BaseUriPrecedencePolicy; import org.archive.util.ArchiveUtils; /** * Tests that operators can create precedence groups for URIs, and that URIs * in one group are crawled before URIs in another group per operator preference. * * <p>The embedded Jetty HTTP server for this test provides the following * document tree: * * <ul> * <li>seed.html</li> * <li>one/</li> * <ul> * <li>a.html</li> * <li>b.html</li> * <li>c.html</li> * </ul> * <li>five/</li> * <ul> * <li>a.html</li> * <li>b.html</li> * <li>c.html</li> * </ul> * <li>ten/</li> * <ul> * <li>a.html</li> * <li>b.html</li> * <li>c.html</li> * </ul> * </ul> * * (See the <code>engine/testdata/selftest/Precedence1SelfTest</code> * directory to view these files.) The <code>seed.html</code> file contains * links to <code>five/a.html</code>, <code>ten/a.html</code>, and * <code>one/a.html</code>, in that order. The <code>a.html</code> files link * to to the <code>b.html</code> files, and the <code>b.html</code> link to * the <code>c.html</code> files, which have no out links. * * <p>Ordinarily Heritrix would crawl these in (roughly) the order the links * are discovered: * * <ol> * <li>seed.html</li> * <li>five/a.html</li> * <li>ten/a.html</li> * <li>one/a.html</li> * <li>five/b.html</li> * <li>ten/b.html</li> * <li>one/b.html</li> * <li>five/c.html</li> * <li>ten/c.html</li> * <li>one/c.html</li> * </ol> * * <p>However, the crawl configuration for this test uses a * {@link BaseUriPrecedencePolicy} instead of the default * {@link org.archive.crawler.frontier.policy.CostUriPrecedencePolicy}. The * <code>BasePrecedencePolicy</code> is configured so that all URIs have a * precedence value of 5 unless otherwise specified. * * <p>There is a sheet named <code>HiPri</code> that overrides the * <code>base-precedence</code> to be 1 instead of 5; thus URIs associated * with the HiPri sheet should be crawled before other URIs. * Similarly, there is a sheet named <code>LoPri</code> that overrides * <code>base-precedence</code> to be 10 instead of 5. URLs associated with * LoPri should be crawled after other URLs. * * <p>The <code>one/</code> directory is associated with the HiPri sheet, and * the <code>ten/</code> directory is associated with the LoPri sheet. This * creates three "groups" of URIs: one, five and ten. All of the URIs in * group "one" should be crawled before any of the URIs in group "five" are * crawled. Similarly, all of the URIs in group "five" should be crawled before * any of the URIs in group "ten". 
* * <p>So the final order in which URLs should be crawled in this test is: * * <ol> * <li>seed.html</li> * <li>one/a.html</li> * <li>one/b.html</li> * <li>one/c.html</li> * <li>five/a.html</li> * <li>five/b.html</li> * <li>five/c.html</li> * <li>ten/a.html</li> * <li>ten/b.html</li> * <li>ten/c.html</li> * </ol> * * This tests ensures that the documents were crawled in the correct order. * * <p>Although this test uses the directory structure of the URIs to group the URIs * into precedence groups, because the test executes on just one machine. * But the same basic configuration could be used to group URIs by any SURT * prefix -- by host or by domain, even by top-level domain. So an operator * could associate HiPri with all .gov sites to ensure that all .gov URIs * are crawled before any non-.gov URIs. * * @author pjack */ public class Precedence1SelfTest extends SelfTestBase { /** * Expected results of the crawl. */ final private static String EXPECTED = "http://127.0.0.1:7777/robots.txt\n" + "http://127.0.0.1:7777/seed.html\n" + "http://127.0.0.1:7777/favicon.ico\n" + "http://127.0.0.1:7777/one/a.html\n" + "http://127.0.0.1:7777/one/b.html\n" + "http://127.0.0.1:7777/one/c.html\n" + "http://127.0.0.1:7777/five/a.html\n" + "http://127.0.0.1:7777/five/b.html\n" + "http://127.0.0.1:7777/five/c.html\n" + "http://127.0.0.1:7777/ten/a.html\n" + "http://127.0.0.1:7777/ten/b.html\n" + "http://127.0.0.1:7777/ten/c.html\n"; @Override protected void verify() throws Exception { File crawlLog = new File(getLogsDir(), "crawl.log"); BufferedReader br = null; String crawled = ""; try { br = new BufferedReader(new FileReader(crawlLog)); for (String s = br.readLine(); s != null; s = br.readLine()) { s = s.substring(42); int i = s.indexOf(' '); s = s.substring(0, i); crawled = crawled + s + "\n"; } } finally { ArchiveUtils.closeQuietly(br); } assertEquals(EXPECTED, crawled); } protected String getSeedsString() { return "http://127.0.0.1:7777/seed.html"; } @Override protected String changeGlobalConfig(String config) { // add a uriPrecedencePolicy with overlayable values, IF replaced // string not already gone (as if by subclass) String uriPrecedencePolicy = " <bean name=\'uriPrecedencePolicy\' class='org.archive.crawler.frontier.precedence.BaseUriPrecedencePolicy'>\n" + " <property name='basePrecedence' value='5'/>\n" + " </bean>"; config = config.replace("<!--@@BEANS_MOREBEANS@@-->", uriPrecedencePolicy); config = configureSheets(config); return super.changeGlobalConfig(config); } protected String configureSheets(String config) { // add sheets which overlay alternate precedence values for some URIs String sheets = "<bean class='org.archive.crawler.spring.SurtPrefixesSheetAssociation'>\n" + " <property name='surtPrefixes'>\n" + " <list>\n" + " <value>http://(127.0.0.1:7777)/ten</value>\n" + " </list>\n" + " </property>\n" + " <property name='targetSheetNames'>\n" + " <list>\n" + " <value>loPri</value>\n" + " </list>\n" + " </property>\n" + "</bean>\n" + "<bean id='loPri' class='org.archive.spring.Sheet'>\n" + " <property name='map'>\n" + " <map>\n" + " <entry key='preparer.uriPrecedencePolicy.basePrecedence' value='10'/>\n" + " </map>\n" + " </property>\n" + "</bean>\n" + "<bean class='org.archive.crawler.spring.SurtPrefixesSheetAssociation'>\n" + " <property name='surtPrefixes'>\n" + " <list>\n" + " <value>http://(127.0.0.1:7777)/one</value>\n" + " </list>\n" + " </property>\n" + " <property name='targetSheetNames'>\n" + " <list>\n" + " <value>hiPri</value>\n" + " </list>\n" + " </property>\n" + "</bean>\n" 
+ "<bean id='hiPri' class='org.archive.spring.Sheet'>\n" + " <property name='map'>\n" + " <map>\n" + " <entry key='preparer.uriPrecedencePolicy.basePrecedence' value='1'/>\n" + " </map>\n" + " </property>\n" + "</bean>\n"; config = config.replace("</beans>", sheets+"</beans>"); return config; } }
3,707
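The self-test above hinges on a single convention: a numerically smaller basePrecedence is scheduled earlier, so the hiPri sheet (1) beats the default (5), which beats the loPri sheet (10). The sketch below is illustrative only (QueuedUri and PrecedenceOrderSketch are hypothetical names, not Heritrix classes); it simply reproduces the ordering that the expected crawl log encodes.

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class PrecedenceOrderSketch {

    // A queued URI paired with the basePrecedence assigned to it by a sheet
    // (1 = hiPri, 5 = crawl default, 10 = loPri in the test above).
    record QueuedUri(String uri, int basePrecedence) {}

    public static void main(String[] args) {
        List<QueuedUri> queue = List.of(
                new QueuedUri("http://127.0.0.1:7777/ten/a.html", 10),
                new QueuedUri("http://127.0.0.1:7777/one/a.html", 1),
                new QueuedUri("http://127.0.0.1:7777/five/a.html", 5));

        // Smaller precedence values are fetched first, which yields the
        // one/ -> five/ -> ten/ order asserted by the EXPECTED crawl log.
        List<String> order = queue.stream()
                .sorted(Comparator.comparingInt(QueuedUri::basePrecedence))
                .map(QueuedUri::uri)
                .collect(Collectors.toList());
        order.forEach(System.out::println);
    }
}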
428
/* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dodola.anole.lib; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; import org.objectweb.asm.commons.GeneratorAdapter; import org.objectweb.asm.tree.LabelNode; import java.util.List; /** * A specialized redirection that handles redirecting the part that redirects the * argument construction for the super()/this() call in a constructor. * <p/> * Note that the generated bytecode does not have a direct translation to code, but as an * example, for a constructor of the form: * <code> * <init>(int x) { * super(x = 1, expr2() ? 3 : 7) * doSomething(x) * } * </code> * <p/> * it becomes: * <code> * <init>(int x) { * Change change = $change; // Move to a variable to avoid multithreading issues. * int a, b; // These variables are not needed in bytecode but are needed for the example. * if (change != null) { * Object[] locals = new Object[2]; * locals[0] = locals; // So the unboxed receiver can update this array * locals[1] = x; * Object[] constructorArguments = change.access$dispatch("init$args", locals); * x = locals[1]; * this(constructorArguments, null); * } else { * a = x = 1; * b = expr2() ? 3 : 7; * super(a, b); * } * if (change != null) { * Object[] locals = new Object[2]; * locals[0] = this; * locals[1] = x; * change.access$dispatch("init$body", locals); * return; * } * doSomething(x); * } * </code> * * @see ConstructorDelegationDetector for the generation of init$args and init$body. */ public class ConstructorArgsRedirection extends Redirection { private final String thisClassName; private final Type[] types; private final LabelNode end; private int locals; // The signature of the dynamically dispatching 'this' constructor. The final parameters is // to disambiguate from other constructors that might preexist on the class. static final String DISPATCHING_THIS_SIGNATURE = "([Ljava/lang/Object;L" + IncrementalVisitor.INSTANT_RELOAD_EXCEPTION + ";)V"; /** * @param thisClassName name of the class that this constructor is in. * @param name the name to redirect to. * @param end the label where the redirection should end (before the super()/this() call). * @param types the types of the arguments on the super()/this() call. */ ConstructorArgsRedirection(LabelNode label, String thisClassName, String name, LabelNode end, Type[] types) { super(label, name); this.thisClassName = thisClassName; this.types = types; this.end = end; locals = -1; } @Override protected void createLocals(GeneratorAdapter mv, List<Type> args) { super.createLocals(mv, args); // Override the locals creation to keep a reference to it. We keep a reference to this // array because we use it to receive the values of the local variables after the // redirection is done. 
locals = mv.newLocal(Type.getType("[Ljava/lang/Object;")); mv.dup(); mv.storeLocal(locals); } @Override protected void redirectLocal(GeneratorAdapter mv, int stackIndex, Type arg) { // If the stack index is 0, we do not send the local variable 0 (this) as it // cannot escape the constructor. Instead, we use this argument position to send // a reference to the locals array where the redirected method will return their // values. if (stackIndex == 0) { mv.loadLocal(locals); } else { super.redirectLocal(mv, stackIndex, arg); } } @Override protected void restore(GeneratorAdapter mv, List<Type> args) { // At this point, init$args has been called and the result Object is on the stack. // The value of that Object is Object[] with exactly n + 1 elements. // The first element is a string with the qualified name of the constructor to call. // The remaining elements are the constructtor arguments. // Create a new local that holds the result of init$args call. mv.visitTypeInsn(Opcodes.CHECKCAST, "[Ljava/lang/Object;"); int constructorArgs = mv.newLocal(Type.getType("[Ljava/lang/Object;")); mv.storeLocal(constructorArgs); // Reinstate local values mv.loadLocal(locals); int stackIndex = 0; for (int arrayIndex = 0; arrayIndex < args.size(); arrayIndex++) { Type arg = args.get(arrayIndex); // Do not restore "this" if (arrayIndex > 0) { // duplicates the array mv.dup(); // index in the array of objects to restore the boxed parameter. mv.push(arrayIndex); // get it from the array mv.arrayLoad(Type.getType(Object.class)); // unbox the argument ByteCodeUtils.unbox(mv, arg); // restore the argument mv.visitVarInsn(arg.getOpcode(Opcodes.ISTORE), stackIndex); } // stack index must progress according to the parameter type we just processed. stackIndex += arg.getSize(); } // pops the array mv.pop(); // Push a null for the marker parameter. mv.loadLocal(constructorArgs); mv.visitInsn(Opcodes.ACONST_NULL); // Invoke the constructor mv.visitMethodInsn(Opcodes.INVOKESPECIAL, thisClassName, "<init>", DISPATCHING_THIS_SIGNATURE, false); mv.goTo(end.getLabel()); } }
2,278
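The javadoc above describes a handshake in which the constructor's locals are boxed into an Object[] whose slot 0 points at the array itself, so that the init$args dispatch target can both return the replacement constructor arguments and write updated locals back. Below is a minimal plain-Java rendering of that pattern; the Change interface and method names are hypothetical, since the real transformation is emitted directly as bytecode by this class.

public class LocalsArraySketch {

    // Hypothetical stand-in for the generated $change field's type.
    interface Change {
        Object accessDispatch(String method, Object[] locals);
    }

    static Object[] redirectConstructorArgs(Change change, int x) {
        // Box the constructor's locals; slot 0 carries the array itself so the
        // dispatch target can write updated values back into it.
        Object[] locals = new Object[2];
        locals[0] = locals;
        locals[1] = x;

        Object[] constructorArguments = (Object[]) change.accessDispatch("init$args", locals);

        // Read back any locals the redirected code reassigned (e.g. "x = 1" in the
        // example above) before handing the computed arguments to this(...).
        int updatedX = (Integer) locals[1];
        System.out.println("x after init$args: " + updatedX);
        return constructorArguments;
    }
}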
544
from office365.runtime.client_value import ClientValue


class CustomActionElement(ClientValue):
    pass
28
820
<filename>datumbox-framework-core/src/main/java/com/datumbox/framework/core/machinelearning/modelselection/Validator.java /** * Copyright (C) 2013-2020 <NAME> <<EMAIL>> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datumbox.framework.core.machinelearning.modelselection; import com.datumbox.framework.common.Configuration; import com.datumbox.framework.core.common.dataobjects.Dataframe; import com.datumbox.framework.core.machinelearning.MLBuilder; import com.datumbox.framework.core.machinelearning.common.abstracts.modelers.AbstractModeler; import com.datumbox.framework.core.machinelearning.common.abstracts.modelselection.AbstractSplitter.Split; import com.datumbox.framework.core.machinelearning.common.interfaces.TrainingParameters; import com.datumbox.framework.core.machinelearning.common.interfaces.ValidationMetrics; import java.util.Iterator; import java.util.LinkedList; import java.util.List; /** * Estimates the validation metrics of a specific model. * * @author <NAME> <<EMAIL>> * @param <VM> */ public class Validator<VM extends ValidationMetrics> { private final Class<VM> vmClass; private final Configuration configuration; /** * The constructor of the K-Fold cross validator. * * @param vmClass */ public Validator(Class<VM> vmClass, Configuration configuration) { this.vmClass = vmClass; this.configuration = configuration; } /** * Estimates the average validation metrics on the provided data splits. * * @param dataSplits * @param trainingParameters * @return */ public VM validate(Iterator<Split> dataSplits, TrainingParameters trainingParameters) { AbstractModeler modeler = MLBuilder.create(trainingParameters, configuration); List<VM> validationMetricsList = new LinkedList<>(); while (dataSplits.hasNext()) { Split s = dataSplits.next(); Dataframe trainData = s.getTrain(); Dataframe testData = s.getTest(); modeler.fit(trainData); trainData.close(); modeler.predict(testData); VM entrySample = ValidationMetrics.newInstance(vmClass, testData); testData.close(); validationMetricsList.add(entrySample); } modeler.close(); VM avgValidationMetrics = ValidationMetrics.newInstance(vmClass, validationMetricsList); return avgValidationMetrics; } }
997
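validate(...) above trains the modeler on each training split, evaluates it on the matching test split, and averages the per-fold metrics. The self-contained sketch below restates that loop with the Datumbox types replaced by a hypothetical Fold/Model pair and a plain double standing in for ValidationMetrics; only the overall control flow is taken from the class above.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class CrossValidationSketch {

    record Fold(double[] train, double[] test) {}

    interface Model {
        void fit(double[] train);
        double score(double[] test); // stands in for predict(...) plus a ValidationMetrics value
    }

    static double validate(Iterator<Fold> folds, Model model) {
        List<Double> metrics = new ArrayList<>();
        while (folds.hasNext()) {
            Fold fold = folds.next();
            model.fit(fold.train());               // train on the training portion
            metrics.add(model.score(fold.test())); // evaluate on the held-out portion
        }
        // Average the per-fold metrics, as ValidationMetrics.newInstance(vmClass, list) does above.
        return metrics.stream().mapToDouble(Double::doubleValue).average().orElse(Double.NaN);
    }
}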
743
<gh_stars>100-1000
{
    "$schema": "https://raw.githubusercontent.com/microsoft/AdaptiveCards/6f39aedce45864ae1067ed44a5551dc973790bb5/source/nodejs/typed-schema/schema/lib/Type.json",
    "extends": "Action, ISelectAction",
    "description": "Gathers input fields, merges with optional data field, and sends an event to the client. Clients process the event by sending an Invoke activity of type adaptiveCard/action to the target Bot. The inputs that are gathered are those on the current card, and in the case of a show card those on any parent cards. See [Universal Action Model](https://docs.microsoft.com/en-us/adaptive-cards/authoring-cards/universal-action-model) documentation for more details.",
    "version": "1.4",
    "properties": {
        "verb": {
            "type": "string",
            "description": "The card author-defined verb associated with this action."
        },
        "data": {
            "type": "string|object",
            "description": "Initial data that input fields will be combined with. These are essentially ‘hidden’ properties."
        },
        "associatedInputs": {
            "type": "AssociatedInputs",
            "description": "Controls which inputs are associated with the action.",
            "default": "auto"
        }
    }
}
369
461
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.apache.ofbiz.entity.model; import java.util.Locale; import java.util.TimeZone; import org.apache.ofbiz.base.lang.ThreadSafe; import org.apache.ofbiz.base.util.StringUtil; import org.apache.ofbiz.base.util.UtilDateTime; import org.apache.ofbiz.base.util.UtilXml; import org.w3c.dom.Element; /** * An object that models the <code>&lt;entitymodel&gt;</code> child elements that provide default values. * */ @ThreadSafe public final class ModelInfo { public static final ModelInfo DEFAULT = new ModelInfo("None", "None", getCopyrightString(), "None", "1.0", ""); /** * Returns a new <code>ModelInfo</code> instance initialized to the values found in <code>element</code> attributes. * @param defaultInfo A <code>ModelInfo</code> instance that will provide default values for missing attributes. * @param element */ public static ModelInfo createFromAttributes(ModelInfo defaultInfo, Element element) { String title = element.getAttribute("title").intern(); if (title.isEmpty()) { title = element.getAttribute("entity-name").intern(); } String description = StringUtil.internString(UtilXml.childElementValue(element, "description")); if (description == null || description.isEmpty()) { description = defaultInfo.getDescription(); } String copyright = element.getAttribute("copyright").intern(); if (copyright.isEmpty()) { copyright = defaultInfo.getCopyright(); } String author = element.getAttribute("author").intern(); if (author.isEmpty()) { author = defaultInfo.getAuthor(); } String version = element.getAttribute("version").intern(); if (version.isEmpty()) { version = defaultInfo.getVersion(); } String defaultResourceName = StringUtil.internString(element.getAttribute("default-resource-name")); if (defaultResourceName.isEmpty()) { defaultResourceName = defaultInfo.getDefaultResourceName(); } return new ModelInfo(title, description, copyright, author, version, defaultResourceName); } /** * Returns a new <code>ModelInfo</code> instance initialized to the values found in <code>element</code> child elements. * @param defaultInfo A <code>ModelInfo</code> instance that will provide default values for missing child elements. 
* @param element */ public static ModelInfo createFromElements(ModelInfo defaultInfo, Element element) { String title = StringUtil.internString(UtilXml.childElementValue(element, "title")); if (title == null || title.isEmpty()) { title = defaultInfo.getTitle(); } String description = StringUtil.internString(UtilXml.childElementValue(element, "description")); if (description == null || description.isEmpty()) { description = defaultInfo.getDescription(); } String copyright = StringUtil.internString(UtilXml.childElementValue(element, "copyright")); if (copyright == null || copyright.isEmpty()) { copyright = defaultInfo.getCopyright(); } String author = StringUtil.internString(UtilXml.childElementValue(element, "author")); if (author == null || author.isEmpty()) { author = defaultInfo.getAuthor(); } String version = StringUtil.internString(UtilXml.childElementValue(element, "version")); if (version == null || version.isEmpty()) { version = defaultInfo.getVersion(); } String defaultResourceName = StringUtil.internString(UtilXml.childElementValue(element, "default-resource-name")); if (defaultResourceName == null || defaultResourceName.isEmpty()) { defaultResourceName = defaultInfo.getDefaultResourceName(); } return new ModelInfo(title, description, copyright, author, version, defaultResourceName); } private static String getCopyrightString() { int year = UtilDateTime.getYear(UtilDateTime.nowTimestamp(), TimeZone.getDefault(), Locale.getDefault()); return "Copyright 2001-" + year + " The Apache Software Foundation"; } /* * Developers - this is an immutable class. Once constructed, the object should not change state. * Therefore, 'setter' methods are not allowed. If client code needs to modify the object's * state, then it can create a new copy with the changed values. */ /** The title for documentation purposes */ private final String title; /** The description for documentation purposes */ private final String description; /** The copyright for documentation purposes */ private final String copyright; /** The author for documentation purposes */ private final String author; /** The version for documentation purposes */ private final String version; /** The default-resource-name of the Entity, used with the getResource call to check for a value in a resource bundle */ private final String defaultResourceName; ModelInfo(String title, String description, String copyright, String author, String version, String defaultResourceName) { this.title = title; this.description = description; this.copyright = copyright; this.author = author; this.version = version; this.defaultResourceName = defaultResourceName; } /** Returns the author. */ public String getAuthor() { return this.author; } /** Returns the copyright. */ public String getCopyright() { return this.copyright; } /** Returns the default resource name. */ public String getDefaultResourceName() { return this.defaultResourceName; } /** Returns the description. */ public String getDescription() { return this.description; } /** Returns the title. */ public String getTitle() { return this.title; } /** Returns the version. */ public String getVersion() { return this.version; } }
2,271
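createFromAttributes above falls back field by field: an empty title falls back to the entity-name attribute, and every other missing value falls back to the supplied defaults. The sketch below shows that behaviour against a made-up XML snippet, assuming the OFBiz classes above are available on the classpath.

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.ofbiz.entity.model.ModelInfo;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;

public class ModelInfoSketch {
    public static void main(String[] args) throws Exception {
        // A minimal entity element with no title, copyright, author, version or resource name.
        String xml = "<entity entity-name=\"ExampleEntity\"/>";
        Element element = DocumentBuilderFactory.newInstance()
                .newDocumentBuilder()
                .parse(new InputSource(new StringReader(xml)))
                .getDocumentElement();

        // Missing attributes fall back to ModelInfo.DEFAULT; the empty title falls back to entity-name.
        ModelInfo info = ModelInfo.createFromAttributes(ModelInfo.DEFAULT, element);
        System.out.println(info.getTitle());     // "ExampleEntity"
        System.out.println(info.getCopyright()); // the generated "Copyright 2001-<current year> ..." default
    }
}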
46,956
<filename>src/you_get/extractors/heavymusic.py
#!/usr/bin/env python

__all__ = ['heavymusic_download']

from ..common import *

def heavymusic_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
    html = get_html(url)
    tracks = re.findall(r'href="(online2\.php[^"]+)"', html)
    for track in tracks:
        band = r1(r'band=([^&]*)', track)
        album = r1(r'album=([^&]*)', track)
        title = r1(r'track=([^&]*)', track)
        file_url = 'http://www.heavy-music.ru/online2.php?band=%s&album=%s&track=%s' % (parse.quote(band), parse.quote(album), parse.quote(title))
        _, _, size = url_info(file_url)

        print_info(site_info, title, 'mp3', size)
        if not info_only:
            download_urls([file_url], title[:-4], 'mp3', size, output_dir, merge=merge)

site_info = "heavy-music.ru"
download = heavymusic_download
download_playlist = heavymusic_download
394
874
package com.jnape.palatable.lambda.semigroup.builtin; import com.jnape.palatable.lambda.adt.Either; import com.jnape.palatable.lambda.functions.Fn1; import com.jnape.palatable.lambda.functions.specialized.SemigroupFactory; import com.jnape.palatable.lambda.monoid.Monoid; import com.jnape.palatable.lambda.semigroup.Semigroup; import static com.jnape.palatable.lambda.adt.Either.right; import static com.jnape.palatable.lambda.functions.builtin.fn1.Constantly.constantly; /** * A {@link Semigroup} instance formed by <code>{@link Either}&lt;L,R&gt;</code> and a semigroup over <code>R</code>. * The application to two {@link Either} values is right-biased, such that for a given {@link Either} <code>x</code> and * <code>y</code>: * <ul> * <li> if both <code>x</code> and <code>y</code> are right values, the result is the application of the x and y values * in terms of the provided semigroup, wrapped in {@link Either#right}</li> * <li> if only <code>x</code> is a right value, the result is <code>x</code></li> * <li> if only <code>y</code> is a right value, the result is <code>y</code></li> * <li> if neither <code>x</code> nor <code>y</code> are right values, the result is <code>y</code></li> * </ul> * <p> * For the {@link Monoid}, see {@link com.jnape.palatable.lambda.monoid.builtin.RightAny}. * * @param <L> The left parameter type * @param <R> The right parameter type * @see Semigroup * @see Either */ public final class RightAny<L, R> implements SemigroupFactory<Semigroup<R>, Either<L, R>> { private static final RightAny<?, ?> INSTANCE = new RightAny<>(); private RightAny() { } @Override public Semigroup<Either<L, R>> checkedApply(Semigroup<R> rSemigroup) { return (x, y) -> x.match(constantly(y), xR -> y.match(constantly(right(xR)), rSemigroup.apply(xR).fmap(Either::right))); } @SuppressWarnings("unchecked") public static <L, R> RightAny<L, R> rightAny() { return (RightAny<L, R>) INSTANCE; } public static <L, R> Semigroup<Either<L, R>> rightAny(Semigroup<R> rSemigroup) { return RightAny.<L, R>rightAny().apply(rSemigroup); } public static <L, R> Fn1<Either<L, R>, Either<L, R>> rightAny(Semigroup<R> rSemigroup, Either<L, R> x) { return RightAny.<L, R>rightAny(rSemigroup).apply(x); } public static <L, R> Either<L, R> rightAny(Semigroup<R> rSemigroup, Either<L, R> x, Either<L, R> y) { return rightAny(rSemigroup, x).apply(y); } }
1,022
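A short usage sketch of the bias rules documented above. The integer-addition semigroup is an illustrative choice, and assigning a lambda to Semigroup is assumed to work here because Semigroup is a functional interface in this library; expected results are stated in comments rather than asserted.

import com.jnape.palatable.lambda.adt.Either;
import com.jnape.palatable.lambda.semigroup.Semigroup;

import static com.jnape.palatable.lambda.adt.Either.left;
import static com.jnape.palatable.lambda.adt.Either.right;
import static com.jnape.palatable.lambda.semigroup.builtin.RightAny.rightAny;

public class RightAnySketch {
    public static void main(String[] args) {
        Semigroup<Integer> sum = Integer::sum;

        Either<String, Integer> a = right(1);
        Either<String, Integer> b = right(2);
        Either<String, Integer> oops = left("oops");

        System.out.println(rightAny(sum, a, b));       // both rights: combined under sum -> right(3)
        System.out.println(rightAny(sum, oops, b));    // only y is a right -> right(2)
        System.out.println(rightAny(sum, a, oops));    // only x is a right -> right(1)
        System.out.println(rightAny(sum, oops, oops)); // neither is a right -> y, i.e. left("oops")
    }
}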
2,151
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef COMPONENTS_OPEN_FROM_CLIPBOARD_CLIPBOARD_RECENT_CONTENT_IMPL_IOS_H_
#define COMPONENTS_OPEN_FROM_CLIPBOARD_CLIPBOARD_RECENT_CONTENT_IMPL_IOS_H_

#import <Foundation/Foundation.h>

// A protocol implemented by delegates to handle clipboard changes.
@protocol ClipboardRecentContentDelegate<NSObject>

- (void)onClipboardChanged;

@end

// Helper class returning a URL if the content of the clipboard can be turned
// into a URL, and if it estimates that the content of the clipboard is not too
// old.
@interface ClipboardRecentContentImplIOS : NSObject

// |delegate| is used for metrics logging and can be nil. |authorizedSchemes|
// should contain all schemes considered valid. |groupUserDefaults| is the
// NSUserDefaults used to store information on pasteboard entry expiration. This
// information will be shared with other applications in the application group.
- (instancetype)initWithMaxAge:(NSTimeInterval)maxAge
             authorizedSchemes:(NSSet<NSString*>*)authorizedSchemes
                  userDefaults:(NSUserDefaults*)groupUserDefaults
                      delegate:(id<ClipboardRecentContentDelegate>)delegate
    NS_DESIGNATED_INITIALIZER;

- (instancetype)init NS_UNAVAILABLE;

// Returns the copied URL if the clipboard contains a recent URL that has not
// been suppressed. Otherwise, returns nil.
- (NSURL*)recentURLFromClipboard;

// Returns how old the content of the clipboard is.
- (NSTimeInterval)clipboardContentAge;

// Prevents GetRecentURLFromClipboard from returning anything until the
// clipboard's content changes.
- (void)suppressClipboardContent;

// Methods below are exposed for testing purposes.

// Estimation of the date when the pasteboard changed.
@property(nonatomic, strong) NSDate* lastPasteboardChangeDate;

// Saves information to the user defaults about the latest pasteboard entry.
- (void)saveToUserDefaults;

@end

#endif  // COMPONENTS_OPEN_FROM_CLIPBOARD_CLIPBOARD_RECENT_CONTENT_IMPL_IOS_H_
673
4,625
// Copyright 2020 JanusGraph Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.janusgraph.graphdb.query.index;

import org.janusgraph.graphdb.query.condition.Condition;

import java.util.HashSet;
import java.util.Set;

/**
 * @author <NAME> (<EMAIL>)
 */
public class IndexCandidateGroup implements Comparable<IndexCandidateGroup> {

    private Set<IndexCandidate> indexCandidates;
    private Set<Condition> coveredClauses;
    // initialize with the worst possible score
    private double score = Double.NEGATIVE_INFINITY;

    public IndexCandidateGroup(Set<IndexCandidate> indexCandidates) {
        this.indexCandidates = indexCandidates;
        this.coveredClauses = new HashSet<>(indexCandidates.size());
        indexCandidates.forEach(c -> coveredClauses.addAll(c.getSubCover()));
    }

    public Set<IndexCandidate> getIndexCandidates() {
        return indexCandidates;
    }

    public Set<Condition> getCoveredClauses() {
        return coveredClauses;
    }

    public double getTotalScore() {
        if (score == Double.NEGATIVE_INFINITY) {
            score = indexCandidates.stream().mapToDouble(IndexCandidate::getScore).sum();
        }
        return score;
    }

    /**
     * Covering more clauses, using fewer indices, and getting a higher score is better.
     *
     * @param that
     * @return
     */
    @Override
    public int compareTo(IndexCandidateGroup that) {
        if (that == null) return 1;

        if (coveredClauses.size() > that.getCoveredClauses().size()) return 1;
        if (coveredClauses.size() < that.getCoveredClauses().size()) return -1;

        if (indexCandidates.size() < that.getIndexCandidates().size()) return 1;
        if (indexCandidates.size() > that.getIndexCandidates().size()) return -1;

        return Double.compare(getTotalScore(), that.getTotalScore());
    }
}
799
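compareTo above orders groups lexicographically: more covered clauses first, then fewer indexes, then a higher total score. The sketch below restates that contract as a Comparator over a hypothetical summary record, purely to make the ordering explicit.

import java.util.Comparator;

public class GroupOrderingSketch {

    // Hypothetical condensation of an IndexCandidateGroup into the three quantities compareTo looks at.
    record GroupSummary(int coveredClauses, int indexCount, double score) {}

    // "Greater" means "better", mirroring compareTo above.
    static final Comparator<GroupSummary> BETTER_GROUP =
            Comparator.comparingInt(GroupSummary::coveredClauses)                       // more covered clauses wins
                    .thenComparing(Comparator.<GroupSummary>comparingInt(GroupSummary::indexCount)
                            .reversed())                                                // fewer indexes wins
                    .thenComparingDouble(GroupSummary::score);                          // higher total score wins

    public static void main(String[] args) {
        GroupSummary a = new GroupSummary(3, 2, 1.5);
        GroupSummary b = new GroupSummary(3, 1, 0.5);
        System.out.println(BETTER_GROUP.compare(a, b) < 0); // true: b covers as much while using fewer indexes
    }
}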
312
<filename>spring-aot/src/main/java/org/springframework/data/JpaConfigurationProcessor.java<gh_stars>100-1000 /* * Copyright 2019-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.data; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.Parameter; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.aot.context.bootstrap.generator.infrastructure.nativex.BeanFactoryNativeConfigurationProcessor; import org.springframework.aot.context.bootstrap.generator.infrastructure.nativex.DefaultNativeReflectionEntry; import org.springframework.aot.context.bootstrap.generator.infrastructure.nativex.DefaultNativeReflectionEntry.Builder; import org.springframework.aot.context.bootstrap.generator.infrastructure.nativex.NativeConfigurationRegistry; import org.springframework.aot.support.BeanFactoryProcessor; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.boot.autoconfigure.AutoConfigurationPackages; import org.springframework.boot.autoconfigure.domain.EntityScanPackages; import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; import org.springframework.context.index.CandidateComponentsIndex; import org.springframework.context.index.CandidateComponentsIndexLoader; import org.springframework.core.annotation.AnnotationFilter; import org.springframework.core.annotation.MergedAnnotation; import org.springframework.core.annotation.MergedAnnotations; import org.springframework.core.env.StandardEnvironment; import org.springframework.core.io.DefaultResourceLoader; import org.springframework.core.type.filter.AnnotationTypeFilter; import org.springframework.nativex.hint.TypeAccess; import org.springframework.util.ClassUtils; import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; /** * @author <NAME> */ public class JpaConfigurationProcessor implements BeanFactoryNativeConfigurationProcessor { private static Log logger = LogFactory.getLog(JpaConfigurationProcessor.class); private static final String JPA_ENTITY = "javax.persistence.Entity"; private static final String JPA_PERSISTENCE_CONTEXT = "javax.persistence.PersistenceContext"; private static final String JPA_ENTITY_LISTENERS = "javax.persistence.EntityListeners"; private static final String JPA_CONVERTER = "javax.persistence.Converter"; @Override public void process(ConfigurableListableBeanFactory beanFactory, 
NativeConfigurationRegistry registry) { if (ClassUtils.isPresent(JPA_ENTITY, beanFactory.getBeanClassLoader())) { logger.debug("JPA detected - processing types."); new JpaPersistenceContextProcessor().process(beanFactory, registry); new JpaEntityProcessor(beanFactory.getBeanClassLoader()).process(beanFactory, registry); new JpaAttributeConverterProcessor(beanFactory.getBeanClassLoader()).process(beanFactory, registry); } } /** * Processor to inspect components for fields that require a {@literal javax.persistence.PersistenceContext}. */ static class JpaPersistenceContextProcessor { void process(ConfigurableListableBeanFactory beanFactory, NativeConfigurationRegistry registry) { new BeanFactoryProcessor(beanFactory).processBeans( (beanType) -> TypeUtils.hasAnnotatedField(beanType, JPA_PERSISTENCE_CONTEXT), (beanName, beanType) -> registry.reflection() .forType(beanType) .withFields(TypeUtils.getAnnotatedField(beanType, JPA_PERSISTENCE_CONTEXT).toArray(new Field[0]))); } } /** * Processor to inspect {@literal javax.persistence.AttributeConverter} annotated with {@literal javax.persistence.Converter}. */ static class JpaAttributeConverterProcessor { private final Class<? extends Annotation> attributeConverterAnnotation; private final ClassLoader classLoader; public JpaAttributeConverterProcessor(ClassLoader classLoader) { this.classLoader = classLoader; attributeConverterAnnotation = loadIfPresent(JPA_CONVERTER, classLoader); } void process(ConfigurableListableBeanFactory beanFactory, NativeConfigurationRegistry registry) { if (attributeConverterAnnotation == null) { return; } Set<Class<?>> attributeConverters = new LinkedHashSet<>(); for (String packageName : getPackagesToScan(beanFactory)) { attributeConverters.addAll(scanForJpaTypes(packageName, attributeConverterAnnotation, classLoader)); } processAttributeConverters(attributeConverters, registry); } void processAttributeConverters(Set<Class<?>> attributeConverters, NativeConfigurationRegistry registry) { for (Class<?> attributeConverter : attributeConverters) { registry.reflection().forType(attributeConverter).withAccess(TypeAccess.DECLARED_CONSTRUCTORS, TypeAccess.PUBLIC_METHODS); ReflectionUtils.doWithLocalMethods(attributeConverter, it -> { if (it.isBridge()) { return; } if ("convertToEntityAttribute".equals(it.getName())) { Class<?> returnType = it.getReturnType(); if (isJavaOrPrimitiveType(returnType)) { return; } Builder builder = registry.reflection().forType(ClassUtils.getUserClass(returnType)); if(!returnType.isInterface()) { builder.withAccess(TypeAccess.DECLARED_CONSTRUCTORS); } registry.reflection().forType(returnType).withAccess(TypeAccess.PUBLIC_METHODS); } }); } } } /** * Processor to inspect user domain types annotated with {@literal javax.persistence.Entity}. */ static class JpaEntityProcessor { private final AnnotationFilter annotationFilter; private final Class<? 
extends Annotation> entityAnnotation; private final Set<JpaImplementation> jpaImplementations; private final ClassLoader classLoader; public JpaEntityProcessor(ClassLoader classLoader) { this.classLoader = classLoader; entityAnnotation = loadIfPresent(JPA_ENTITY, classLoader); jpaImplementations = new LinkedHashSet<>(Arrays.asList(new HibernateJpaImplementation())) .stream() .filter(it -> it.isAvailable(classLoader)) .collect(Collectors.toSet()); HashSet<String> availableNamespaces = new HashSet<>(); availableNamespaces.add("javax.persistence"); jpaImplementations.forEach(it -> availableNamespaces.add(it.getNamespace())); annotationFilter = AnnotationFilter.packages(availableNamespaces.toArray(new String[0])); } /** * Scan the path for JPA entities and process those. * Tries to look up types within the {@literal spring.components} index first and will use types discovered there if present. * Once no entities could be found in the index we'll try to find a component that defines an {@literal EntityScan} and read the * {@literal basePackages} attribute to do some potentially slow class path scanning. * * @param registry must not be {@literal null}. */ void process(ConfigurableListableBeanFactory beanFactory, NativeConfigurationRegistry registry) { Set<Class<?>> entities = readEntitiesFromIndex(); if (!entities.isEmpty()) { process(entities, registry); return; } Set<Class<?>> resolvedTypes = new LinkedHashSet<>(); for(String packageName : getPackagesToScan(beanFactory)) { resolvedTypes.addAll(scanForJpaTypes(packageName, entityAnnotation, classLoader)); } process(resolvedTypes, registry); } /** * Process JPA to level entities. * * @param entities * @param registry */ void process(Set<Class<?>> entities, NativeConfigurationRegistry registry) { TypeModelProcessor typeModelProcessor = new TypeModelProcessor(); entities.forEach(type -> { if (isJavaOrPrimitiveType(type)) { return; } /* * If an EntityListener is defined we need to inspect the target and make sure * reflection is configured so the methods can be invoked */ MergedAnnotation<Annotation> entityListener = MergedAnnotations.from(type).get(JPA_ENTITY_LISTENERS); if (entityListener.isPresent()) { Class<?>[] values = entityListener.getClassArray("value"); for (Class<?> listener : values) { registry.reflection().forType(listener).withAccess(TypeAccess.DECLARED_CONSTRUCTORS, TypeAccess.PUBLIC_METHODS); } } /* * Retrieve all reachable types and register reflection for it. * Final fields require special treatment having allowWrite set. 
*/ typeModelProcessor.inspect(type).forEach(typeModel -> { if(typeModel.isPartOf("java") || typeModel.isPrimitiveType()) { return; } DefaultNativeReflectionEntry.Builder builder = registry.reflection().forType(typeModel.getType()); builder.withAccess(TypeAccess.DECLARED_FIELDS, TypeAccess.DECLARED_METHODS, TypeAccess.DECLARED_CONSTRUCTORS); if(typeModel.hasDeclaredClasses()) { builder.withAccess(TypeAccess.DECLARED_CLASSES); } typeModel.doWithFields(field -> { if (Modifier.isFinal(field.getModifiers())) { builder.withField(field, DefaultNativeReflectionEntry.FieldAccess.ALLOW_WRITE, DefaultNativeReflectionEntry.FieldAccess.UNSAFE); } }); typeModel.doWithAnnotatedElements(element -> { writeAnnotationConfigurationFor(element, registry); }); jpaImplementations.forEach(it -> it.process(typeModel, classLoader, registry)); }); }); } /** * Write the required configuration for annotations that belong to the persistence namespace * * @param element * @param registry */ private void writeAnnotationConfigurationFor(AnnotatedElement element, NativeConfigurationRegistry registry) { TypeUtils.resolveAnnotationsFor(element) .filter(it -> annotationFilter.matches(it.getType())) .forEach(annotation -> { // the annotation itself registry.reflection().forType(annotation.getType()).withAccess(TypeAccess.PUBLIC_CONSTRUCTORS, TypeAccess.PUBLIC_METHODS); // check if the annotation holds a reference to a class value we might require reflection for annotation.asMap().entrySet().forEach(entry -> { if(entry.getValue() instanceof Class) { Class<?> attributeValue = (Class<?>) entry.getValue(); if(!isJavaOrPrimitiveType(attributeValue)) { registry.reflection().forType(attributeValue).withAccess(TypeAccess.PUBLIC_CONSTRUCTORS, TypeAccess.PUBLIC_METHODS, TypeAccess.DECLARED_FIELDS); } } else if(entry.getValue() instanceof Class[]) { for(Class<?> attributeValue : (Class<?>[]) entry.getValue()) { if(!isJavaOrPrimitiveType(attributeValue)) { registry.reflection().forType(attributeValue).withAccess(TypeAccess.PUBLIC_CONSTRUCTORS, TypeAccess.PUBLIC_METHODS, TypeAccess.DECLARED_FIELDS); } } } }); }); if (element instanceof Constructor) { for (Parameter parameter : ((Constructor<?>) element).getParameters()) { writeAnnotationConfigurationFor(parameter, registry); } } if (element instanceof Method) { for (Parameter parameter : ((Method) element).getParameters()) { writeAnnotationConfigurationFor(parameter, registry); } } } /** * Scan the {@literal spring.components} index for types annotated with {@link #JPA_ENTITY} * * @return the {@link Set} of top level entities. 
*/ Set<Class<?>> readEntitiesFromIndex() { CandidateComponentsIndex index = CandidateComponentsIndexLoader.loadIndex(classLoader); if (index == null) { return Collections.emptySet(); } Set<String> candidateTypes = index.getCandidateTypes("*", JPA_ENTITY); return candidateTypes.stream().map(it -> loadIfPresent(it, classLoader)).filter(it -> it != null).collect(Collectors.toSet()); } } private static boolean isJavaOrPrimitiveType(Class<?> type) { if (TypeUtils.type(type).isPartOf("java") || type.isPrimitive() || ClassUtils.isPrimitiveArray(type)) { return true; } return false; } private static List<String> getPackagesToScan(BeanFactory beanFactory) { List<String> packages = EntityScanPackages.get(beanFactory).getPackageNames(); if (packages.isEmpty() && AutoConfigurationPackages.has(beanFactory)) { packages = AutoConfigurationPackages.get(beanFactory); } return packages; } /** * Scan the classpath for types annotated with {@link #JPA_ENTITY} * * @param basePackage must not be null nor empty. * @param requiredAnnotation must not be {@literal null}. * @param classLoader must not be {@literal null}. * @return the {@link Set} of top level entities. */ static Set<Class<?>> scanForJpaTypes(String basePackage, Class<? extends Annotation> requiredAnnotation, ClassLoader classLoader) { if (requiredAnnotation == null || !StringUtils.hasText(basePackage)) { return Collections.emptySet(); } ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider(false, new StandardEnvironment()); componentProvider.setResourceLoader(new DefaultResourceLoader(classLoader)); componentProvider.addIncludeFilter(new AnnotationTypeFilter(requiredAnnotation)); Set<Class<?>> entities = new LinkedHashSet<>(); for (BeanDefinition definition : componentProvider.findCandidateComponents(basePackage)) { Class<?> type = loadIfPresent(definition.getBeanClassName(), classLoader); if (type == null) { continue; } entities.add(type); } return entities; } private interface JpaImplementation { String getNamespace(); boolean isAvailable(ClassLoader classLoader); void process(TypeModel type, ClassLoader classLoader, NativeConfigurationRegistry registry); } private static class HibernateJpaImplementation implements JpaImplementation { private Boolean present; @Override public String getNamespace() { return "org.hibernate"; } @Override public boolean isAvailable(ClassLoader classLoader) { if (present == null) { present = ClassUtils.isPresent("org.hibernate.Hibernate", classLoader); } return present; } @Override public void process(TypeModel type, ClassLoader classLoader, NativeConfigurationRegistry registry) { if (!type.getType().isEnum()) { return; } Class<Object> objectClass = loadIfPresent("org.hibernate.type.EnumType", classLoader); if (objectClass != null) { registry.reflection().forType(objectClass).withAccess(TypeAccess.DECLARED_CONSTRUCTORS); } } } private static <T> Class<T> loadIfPresent(String name, ClassLoader classLoader) { try { return (Class<T>) ClassUtils.forName(name, classLoader); } catch (ClassNotFoundException e) { // } return null; } }
5,138
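When the spring.components index yields no entities, the processor above falls back to a classpath scan using ClassPathScanningCandidateComponentProvider with an AnnotationTypeFilter for javax.persistence.Entity. The condensed sketch below shows that scan in isolation; the base package is a placeholder and error handling is omitted.

import java.util.LinkedHashSet;
import java.util.Set;
import javax.persistence.Entity;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.core.type.filter.AnnotationTypeFilter;

public class EntityScanSketch {
    public static void main(String[] args) throws ClassNotFoundException {
        // false = do not use the default @Component filters; only @Entity types are of interest.
        ClassPathScanningCandidateComponentProvider provider =
                new ClassPathScanningCandidateComponentProvider(false);
        provider.addIncludeFilter(new AnnotationTypeFilter(Entity.class));

        Set<Class<?>> entities = new LinkedHashSet<>();
        for (BeanDefinition candidate : provider.findCandidateComponents("com.example.domain")) {
            // The real processor loads each candidate and registers it for reflective access.
            entities.add(Class.forName(candidate.getBeanClassName()));
        }
        entities.forEach(System.out::println);
    }
}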
521
<reponame>Fimbure/icebox-1 /* * Copyright (C) 2006 <NAME> <<EMAIL>>. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License as * published by the Free Software Foundation; either version 2 of the * License, or any later version. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ FILE_LICENCE ( GPL2_OR_LATER ); #include <stdint.h> #include <stdlib.h> #include <stdio.h> #include <string.h> #include <strings.h> #include <byteswap.h> #include <errno.h> #include <assert.h> #include <ipxe/refcnt.h> #include <ipxe/iobuf.h> #include <ipxe/xfer.h> #include <ipxe/open.h> #include <ipxe/uri.h> #include <ipxe/tcpip.h> #include <ipxe/retry.h> #include <ipxe/features.h> #include <ipxe/bitmap.h> #include <ipxe/settings.h> #include <ipxe/dhcp.h> #include <ipxe/uri.h> #include <ipxe/tftp.h> /** @file * * TFTP protocol * */ FEATURE ( FEATURE_PROTOCOL, "TFTP", DHCP_EB_FEATURE_TFTP, 1 ); /* TFTP-specific error codes */ #define EINVAL_BLKSIZE __einfo_error ( EINFO_EINVAL_BLKSIZE ) #define EINFO_EINVAL_BLKSIZE __einfo_uniqify \ ( EINFO_EINVAL, 0x01, "Invalid blksize" ) #define EINVAL_TSIZE __einfo_error ( EINFO_EINVAL_TSIZE ) #define EINFO_EINVAL_TSIZE __einfo_uniqify \ ( EINFO_EINVAL, 0x02, "Invalid tsize" ) #define EINVAL_MC_NO_PORT __einfo_error ( EINFO_EINVAL_MC_NO_PORT ) #define EINFO_EINVAL_MC_NO_PORT __einfo_uniqify \ ( EINFO_EINVAL, 0x03, "Missing multicast port" ) #define EINVAL_MC_NO_MC __einfo_error ( EINFO_EINVAL_MC_NO_MC ) #define EINFO_EINVAL_MC_NO_MC __einfo_uniqify \ ( EINFO_EINVAL, 0x04, "Missing multicast mc" ) #define EINVAL_MC_INVALID_MC __einfo_error ( EINFO_EINVAL_MC_INVALID_MC ) #define EINFO_EINVAL_MC_INVALID_MC __einfo_uniqify \ ( EINFO_EINVAL, 0x05, "Missing multicast IP" ) #define EINVAL_MC_INVALID_IP __einfo_error ( EINFO_EINVAL_MC_INVALID_IP ) #define EINFO_EINVAL_MC_INVALID_IP __einfo_uniqify \ ( EINFO_EINVAL, 0x06, "Invalid multicast IP" ) #define EINVAL_MC_INVALID_PORT __einfo_error ( EINFO_EINVAL_MC_INVALID_PORT ) #define EINFO_EINVAL_MC_INVALID_PORT __einfo_uniqify \ ( EINFO_EINVAL, 0x07, "Invalid multicast port" ) /** * A TFTP request * * This data structure holds the state for an ongoing TFTP transfer. */ struct tftp_request { /** Reference count */ struct refcnt refcnt; /** Data transfer interface */ struct interface xfer; /** URI being fetched */ struct uri *uri; /** Transport layer interface */ struct interface socket; /** Multicast transport layer interface */ struct interface mc_socket; /** Data block size * * This is the "blksize" option negotiated with the TFTP * server. (If the TFTP server does not support TFTP options, * this will default to 512). */ unsigned int blksize; /** File size * * This is the value returned in the "tsize" option from the * TFTP server. If the TFTP server does not support the * "tsize" option, this value will be zero. */ unsigned long tsize; /** Server port * * This is the port to which RRQ packets are sent. */ unsigned int port; /** Peer address * * The peer address is determined by the first response * received to the TFTP RRQ. 
*/ struct sockaddr_tcpip peer; /** Request flags */ unsigned int flags; /** MTFTP timeout count */ unsigned int mtftp_timeouts; /** Block bitmap */ struct bitmap bitmap; /** Maximum known length * * We don't always know the file length in advance. In * particular, if the TFTP server doesn't support the tsize * option, or we are using MTFTP, then we don't know the file * length until we see the end-of-file block (which, in the * case of MTFTP, may not be the last block we see). * * This value is updated whenever we obtain information about * the file length. */ size_t filesize; /** Retransmission timer */ struct retry_timer timer; }; /** TFTP request flags */ enum { /** Send ACK packets */ TFTP_FL_SEND_ACK = 0x0001, /** Request blksize and tsize options */ TFTP_FL_RRQ_SIZES = 0x0002, /** Request multicast option */ TFTP_FL_RRQ_MULTICAST = 0x0004, /** Perform MTFTP recovery on timeout */ TFTP_FL_MTFTP_RECOVERY = 0x0008, /** Only get filesize and then abort the transfer */ TFTP_FL_SIZEONLY = 0x0010, }; /** Maximum number of MTFTP open requests before falling back to TFTP */ #define MTFTP_MAX_TIMEOUTS 3 /** * Free TFTP request * * @v refcnt Reference counter */ static void tftp_free ( struct refcnt *refcnt ) { struct tftp_request *tftp = container_of ( refcnt, struct tftp_request, refcnt ); uri_put ( tftp->uri ); bitmap_free ( &tftp->bitmap ); free ( tftp ); } /** * Mark TFTP request as complete * * @v tftp TFTP connection * @v rc Return status code */ static void tftp_done ( struct tftp_request *tftp, int rc ) { DBGC ( tftp, "TFTP %p finished with status %d (%s)\n", tftp, rc, strerror ( rc ) ); /* Stop the retry timer */ stop_timer ( &tftp->timer ); /* Close all data transfer interfaces */ intf_shutdown ( &tftp->socket, rc ); intf_shutdown ( &tftp->mc_socket, rc ); intf_shutdown ( &tftp->xfer, rc ); } /** * Reopen TFTP socket * * @v tftp TFTP connection * @ret rc Return status code */ static int tftp_reopen ( struct tftp_request *tftp ) { struct sockaddr_tcpip server; int rc; /* Close socket */ intf_restart ( &tftp->socket, 0 ); /* Disable ACK sending. */ tftp->flags &= ~TFTP_FL_SEND_ACK; /* Reset peer address */ memset ( &tftp->peer, 0, sizeof ( tftp->peer ) ); /* Open socket */ memset ( &server, 0, sizeof ( server ) ); server.st_port = htons ( tftp->port ); if ( ( rc = xfer_open_named_socket ( &tftp->socket, SOCK_DGRAM, ( struct sockaddr * ) &server, tftp->uri->host, NULL ) ) != 0 ) { DBGC ( tftp, "TFTP %p could not open socket: %s\n", tftp, strerror ( rc ) ); return rc; } return 0; } /** * Reopen TFTP multicast socket * * @v tftp TFTP connection * @v local Local socket address * @ret rc Return status code */ static int tftp_reopen_mc ( struct tftp_request *tftp, struct sockaddr *local ) { int rc; /* Close multicast socket */ intf_restart ( &tftp->mc_socket, 0 ); /* Open multicast socket. We never send via this socket, so * use the local address as the peer address (since the peer * address cannot be NULL). 
*/ if ( ( rc = xfer_open_socket ( &tftp->mc_socket, SOCK_DGRAM, local, local ) ) != 0 ) { DBGC ( tftp, "TFTP %p could not open multicast " "socket: %s\n", tftp, strerror ( rc ) ); return rc; } return 0; } /** * Presize TFTP receive buffers and block bitmap * * @v tftp TFTP connection * @v filesize Known minimum file size * @ret rc Return status code */ static int tftp_presize ( struct tftp_request *tftp, size_t filesize ) { unsigned int num_blocks; int rc; /* Do nothing if we are already large enough */ if ( filesize <= tftp->filesize ) return 0; /* Record filesize */ tftp->filesize = filesize; /* Notify recipient of file size */ xfer_seek ( &tftp->xfer, filesize ); xfer_seek ( &tftp->xfer, 0 ); /* Calculate expected number of blocks. Note that files whose * length is an exact multiple of the blocksize will have a * trailing zero-length block, which must be included. */ num_blocks = ( ( filesize / tftp->blksize ) + 1 ); if ( ( rc = bitmap_resize ( &tftp->bitmap, num_blocks ) ) != 0 ) { DBGC ( tftp, "TFTP %p could not resize bitmap to %d blocks: " "%s\n", tftp, num_blocks, strerror ( rc ) ); return rc; } return 0; } /** * TFTP requested blocksize * * This is treated as a global configuration parameter. */ static unsigned int tftp_request_blksize = TFTP_MAX_BLKSIZE; /** * Set TFTP request blocksize * * @v blksize Requested block size */ void tftp_set_request_blksize ( unsigned int blksize ) { if ( blksize < TFTP_DEFAULT_BLKSIZE ) blksize = TFTP_DEFAULT_BLKSIZE; tftp_request_blksize = blksize; } /** * MTFTP multicast receive address * * This is treated as a global configuration parameter. */ static struct sockaddr_in tftp_mtftp_socket = { .sin_family = AF_INET, .sin_addr.s_addr = htonl ( 0xefff0101 ), .sin_port = htons ( 3001 ), }; /** * Set MTFTP multicast address * * @v address Multicast IPv4 address */ void tftp_set_mtftp_address ( struct in_addr address ) { tftp_mtftp_socket.sin_addr = address; } /** * Set MTFTP multicast port * * @v port Multicast port */ void tftp_set_mtftp_port ( unsigned int port ) { tftp_mtftp_socket.sin_port = htons ( port ); } /** * Transmit RRQ * * @v tftp TFTP connection * @ret rc Return status code */ static int tftp_send_rrq ( struct tftp_request *tftp ) { struct tftp_rrq *rrq; const char *path; size_t len; struct io_buffer *iobuf; /* Strip initial '/' if present. If we were opened via the * URI interface, then there will be an initial '/', since a * full tftp:// URI provides no way to specify a non-absolute * path. However, many TFTP servers (particularly Windows * TFTP servers) complain about having an initial '/', and it * violates user expectations to have a '/' silently added to * the DHCP-specified filename. */ path = tftp->uri->path; if ( *path == '/' ) path++; DBGC ( tftp, "TFTP %p requesting \"%s\"\n", tftp, path ); /* Allocate buffer */ len = ( sizeof ( *rrq ) + strlen ( path ) + 1 /* NUL */ + 5 + 1 /* "octet" + NUL */ + 7 + 1 + 5 + 1 /* "blksize" + NUL + ddddd + NUL */ + 5 + 1 + 1 + 1 /* "tsize" + NUL + "0" + NUL */ + 9 + 1 + 1 /* "multicast" + NUL + NUL */ ); iobuf = xfer_alloc_iob ( &tftp->socket, len ); if ( ! 
iobuf ) return -ENOMEM; /* Build request */ rrq = iob_put ( iobuf, sizeof ( *rrq ) ); rrq->opcode = htons ( TFTP_RRQ ); iob_put ( iobuf, snprintf ( iobuf->tail, iob_tailroom ( iobuf ), "%s%coctet", path, 0 ) + 1 ); if ( tftp->flags & TFTP_FL_RRQ_SIZES ) { iob_put ( iobuf, snprintf ( iobuf->tail, iob_tailroom ( iobuf ), "blksize%c%d%ctsize%c0", 0, tftp_request_blksize, 0, 0 ) + 1 ); } if ( tftp->flags & TFTP_FL_RRQ_MULTICAST ) { iob_put ( iobuf, snprintf ( iobuf->tail, iob_tailroom ( iobuf ), "multicast%c", 0 ) + 1 ); } /* RRQ always goes to the address specified in the initial * xfer_open() call */ return xfer_deliver_iob ( &tftp->socket, iobuf ); } /** * Transmit ACK * * @v tftp TFTP connection * @ret rc Return status code */ static int tftp_send_ack ( struct tftp_request *tftp ) { struct tftp_ack *ack; struct io_buffer *iobuf; struct xfer_metadata meta = { .dest = ( struct sockaddr * ) &tftp->peer, }; unsigned int block; /* Determine next required block number */ block = bitmap_first_gap ( &tftp->bitmap ); DBGC2 ( tftp, "TFTP %p sending ACK for block %d\n", tftp, block ); /* Allocate buffer */ iobuf = xfer_alloc_iob ( &tftp->socket, sizeof ( *ack ) ); if ( ! iobuf ) return -ENOMEM; /* Build ACK */ ack = iob_put ( iobuf, sizeof ( *ack ) ); ack->opcode = htons ( TFTP_ACK ); ack->block = htons ( block ); /* ACK always goes to the peer recorded from the RRQ response */ return xfer_deliver ( &tftp->socket, iobuf, &meta ); } /** * Transmit ERROR (Abort) * * @v tftp TFTP connection * @v errcode TFTP error code * @v errmsg Error message string * @ret rc Return status code */ static int tftp_send_error ( struct tftp_request *tftp, int errcode, const char *errmsg ) { struct tftp_error *err; struct io_buffer *iobuf; struct xfer_metadata meta = { .dest = ( struct sockaddr * ) &tftp->peer, }; size_t msglen; DBGC2 ( tftp, "TFTP %p sending ERROR %d: %s\n", tftp, errcode, errmsg ); /* Allocate buffer */ msglen = sizeof ( *err ) + strlen ( errmsg ) + 1 /* NUL */; iobuf = xfer_alloc_iob ( &tftp->socket, msglen ); if ( ! iobuf ) return -ENOMEM; /* Build ERROR */ err = iob_put ( iobuf, msglen ); err->opcode = htons ( TFTP_ERROR ); err->errcode = htons ( errcode ); strcpy ( err->errmsg, errmsg ); /* ERR always goes to the peer recorded from the RRQ response */ return xfer_deliver ( &tftp->socket, iobuf, &meta ); } /** * Transmit next relevant packet * * @v tftp TFTP connection * @ret rc Return status code */ static int tftp_send_packet ( struct tftp_request *tftp ) { /* Update retransmission timer. While name resolution takes place the * window is zero. Avoid unnecessary delay after name resolution * completes by retrying immediately. */ stop_timer ( &tftp->timer ); if ( xfer_window ( &tftp->socket ) ) { start_timer ( &tftp->timer ); } else { start_timer_nodelay ( &tftp->timer ); } /* Send RRQ or ACK as appropriate */ if ( ! tftp->peer.st_family ) { return tftp_send_rrq ( tftp ); } else { if ( tftp->flags & TFTP_FL_SEND_ACK ) { return tftp_send_ack ( tftp ); } else { return 0; } } } /** * Handle TFTP retransmission timer expiry * * @v timer Retry timer * @v fail Failure indicator */ static void tftp_timer_expired ( struct retry_timer *timer, int fail ) { struct tftp_request *tftp = container_of ( timer, struct tftp_request, timer ); int rc; /* If we are doing MTFTP, attempt the various recovery strategies */ if ( tftp->flags & TFTP_FL_MTFTP_RECOVERY ) { if ( tftp->peer.st_family ) { /* If we have received any response from the server, * try resending the RRQ to restart the download. 
*/ DBGC ( tftp, "TFTP %p attempting reopen\n", tftp ); if ( ( rc = tftp_reopen ( tftp ) ) != 0 ) goto err; } else { /* Fall back to plain TFTP after several attempts */ tftp->mtftp_timeouts++; DBGC ( tftp, "TFTP %p timeout %d waiting for MTFTP " "open\n", tftp, tftp->mtftp_timeouts ); if ( tftp->mtftp_timeouts > MTFTP_MAX_TIMEOUTS ) { DBGC ( tftp, "TFTP %p falling back to plain " "TFTP\n", tftp ); tftp->flags = TFTP_FL_RRQ_SIZES; /* Close multicast socket */ intf_restart ( &tftp->mc_socket, 0 ); /* Reset retry timer */ start_timer_nodelay ( &tftp->timer ); /* The blocksize may change: discard * the block bitmap */ bitmap_free ( &tftp->bitmap ); memset ( &tftp->bitmap, 0, sizeof ( tftp->bitmap ) ); /* Reopen on standard TFTP port */ tftp->port = TFTP_PORT; if ( ( rc = tftp_reopen ( tftp ) ) != 0 ) goto err; } } } else { /* Not doing MTFTP (or have fallen back to plain * TFTP); fail as per normal. */ if ( fail ) { rc = -ETIMEDOUT; goto err; } } tftp_send_packet ( tftp ); return; err: tftp_done ( tftp, rc ); } /** * Process TFTP "blksize" option * * @v tftp TFTP connection * @v value Option value * @ret rc Return status code */ static int tftp_process_blksize ( struct tftp_request *tftp, const char *value ) { char *end; tftp->blksize = strtoul ( value, &end, 10 ); if ( *end ) { DBGC ( tftp, "TFTP %p got invalid blksize \"%s\"\n", tftp, value ); return -EINVAL_BLKSIZE; } DBGC ( tftp, "TFTP %p blksize=%d\n", tftp, tftp->blksize ); return 0; } /** * Process TFTP "tsize" option * * @v tftp TFTP connection * @v value Option value * @ret rc Return status code */ static int tftp_process_tsize ( struct tftp_request *tftp, const char *value ) { char *end; tftp->tsize = strtoul ( value, &end, 10 ); if ( *end ) { DBGC ( tftp, "TFTP %p got invalid tsize \"%s\"\n", tftp, value ); return -EINVAL_TSIZE; } DBGC ( tftp, "TFTP %p tsize=%ld\n", tftp, tftp->tsize ); return 0; } /** * Process TFTP "multicast" option * * @v tftp TFTP connection * @v value Option value * @ret rc Return status code */ static int tftp_process_multicast ( struct tftp_request *tftp, const char *value ) { union { struct sockaddr sa; struct sockaddr_in sin; } socket; char buf[ strlen ( value ) + 1 ]; char *addr; char *port; char *port_end; char *mc; char *mc_end; int rc; /* Split value into "addr,port,mc" fields */ memcpy ( buf, value, sizeof ( buf ) ); addr = buf; port = strchr ( addr, ',' ); if ( ! port ) { DBGC ( tftp, "TFTP %p multicast missing port,mc\n", tftp ); return -EINVAL_MC_NO_PORT; } *(port++) = '\0'; mc = strchr ( port, ',' ); if ( ! mc ) { DBGC ( tftp, "TFTP %p multicast missing mc\n", tftp ); return -EINVAL_MC_NO_MC; } *(mc++) = '\0'; /* Parse parameters */ if ( strtoul ( mc, &mc_end, 0 ) == 0 ) tftp->flags &= ~TFTP_FL_SEND_ACK; if ( *mc_end ) { DBGC ( tftp, "TFTP %p multicast invalid mc %s\n", tftp, mc ); return -EINVAL_MC_INVALID_MC; } DBGC ( tftp, "TFTP %p is%s the master client\n", tftp, ( ( tftp->flags & TFTP_FL_SEND_ACK ) ? 
"" : " not" ) ); if ( *addr && *port ) { socket.sin.sin_family = AF_INET; if ( inet_aton ( addr, &socket.sin.sin_addr ) == 0 ) { DBGC ( tftp, "TFTP %p multicast invalid IP address " "%s\n", tftp, addr ); return -EINVAL_MC_INVALID_IP; } DBGC ( tftp, "TFTP %p multicast IP address %s\n", tftp, inet_ntoa ( socket.sin.sin_addr ) ); socket.sin.sin_port = htons ( strtoul ( port, &port_end, 0 ) ); if ( *port_end ) { DBGC ( tftp, "TFTP %p multicast invalid port %s\n", tftp, port ); return -EINVAL_MC_INVALID_PORT; } DBGC ( tftp, "TFTP %p multicast port %d\n", tftp, ntohs ( socket.sin.sin_port ) ); if ( ( rc = tftp_reopen_mc ( tftp, &socket.sa ) ) != 0 ) return rc; } return 0; } /** A TFTP option */ struct tftp_option { /** Option name */ const char *name; /** Option processor * * @v tftp TFTP connection * @v value Option value * @ret rc Return status code */ int ( * process ) ( struct tftp_request *tftp, const char *value ); }; /** Recognised TFTP options */ static struct tftp_option tftp_options[] = { { "blksize", tftp_process_blksize }, { "tsize", tftp_process_tsize }, { "multicast", tftp_process_multicast }, { NULL, NULL } }; /** * Process TFTP option * * @v tftp TFTP connection * @v name Option name * @v value Option value * @ret rc Return status code */ static int tftp_process_option ( struct tftp_request *tftp, const char *name, const char *value ) { struct tftp_option *option; for ( option = tftp_options ; option->name ; option++ ) { if ( strcasecmp ( name, option->name ) == 0 ) return option->process ( tftp, value ); } DBGC ( tftp, "TFTP %p received unknown option \"%s\" = \"%s\"\n", tftp, name, value ); /* Unknown options should be silently ignored */ return 0; } /** * Receive OACK * * @v tftp TFTP connection * @v buf Temporary data buffer * @v len Length of temporary data buffer * @ret rc Return status code */ static int tftp_rx_oack ( struct tftp_request *tftp, void *buf, size_t len ) { struct tftp_oack *oack = buf; char *end = buf + len; char *name; char *value; char *next; int rc = 0; /* Sanity check */ if ( len < sizeof ( *oack ) ) { DBGC ( tftp, "TFTP %p received underlength OACK packet " "length %zd\n", tftp, len ); rc = -EINVAL; goto done; } /* Process each option in turn */ for ( name = oack->data ; name < end ; name = next ) { /* Parse option name and value * * We treat parsing errors as non-fatal, because there * exists at least one TFTP server (IBM Tivoli PXE * Server 172.16.17.32) that has been observed to send * malformed OACKs containing trailing garbage bytes. 
*/ value = ( name + strnlen ( name, ( end - name ) ) + 1 ); if ( value > end ) { DBGC ( tftp, "TFTP %p received OACK with malformed " "option name:\n", tftp ); DBGC_HD ( tftp, oack, len ); break; } if ( value == end ) { DBGC ( tftp, "TFTP %p received OACK missing value " "for option \"%s\"\n", tftp, name ); DBGC_HD ( tftp, oack, len ); break; } next = ( value + strnlen ( value, ( end - value ) ) + 1 ); if ( next > end ) { DBGC ( tftp, "TFTP %p received OACK with malformed " "value for option \"%s\":\n", tftp, name ); DBGC_HD ( tftp, oack, len ); break; } /* Process option */ if ( ( rc = tftp_process_option ( tftp, name, value ) ) != 0 ) goto done; } /* Process tsize information, if available */ if ( tftp->tsize ) { if ( ( rc = tftp_presize ( tftp, tftp->tsize ) ) != 0 ) goto done; } /* Abort request if only trying to determine file size */ if ( tftp->flags & TFTP_FL_SIZEONLY ) { rc = 0; tftp_send_error ( tftp, 0, "TFTP Aborted" ); tftp_done ( tftp, rc ); return rc; } /* Request next data block */ tftp_send_packet ( tftp ); done: if ( rc ) tftp_done ( tftp, rc ); return rc; } /** * Receive DATA * * @v tftp TFTP connection * @v iobuf I/O buffer * @ret rc Return status code * * Takes ownership of I/O buffer. */ static int tftp_rx_data ( struct tftp_request *tftp, struct io_buffer *iobuf ) { struct tftp_data *data = iobuf->data; struct xfer_metadata meta; unsigned int block; off_t offset; size_t data_len; int rc; if ( tftp->flags & TFTP_FL_SIZEONLY ) { /* If we get here then server doesn't support SIZE option */ rc = -ENOTSUP; tftp_send_error ( tftp, 0, "TFTP Aborted" ); goto done; } /* Sanity check */ if ( iob_len ( iobuf ) < sizeof ( *data ) ) { DBGC ( tftp, "TFTP %p received underlength DATA packet " "length %zd\n", tftp, iob_len ( iobuf ) ); rc = -EINVAL; goto done; } /* Calculate block number */ block = ( ( bitmap_first_gap ( &tftp->bitmap ) + 1 ) & ~0xffff ); if ( data->block == 0 && block == 0 ) { DBGC ( tftp, "TFTP %p received data block 0\n", tftp ); rc = -EINVAL; goto done; } block += ( ntohs ( data->block ) - 1 ); /* Extract data */ offset = ( block * tftp->blksize ); iob_pull ( iobuf, sizeof ( *data ) ); data_len = iob_len ( iobuf ); if ( data_len > tftp->blksize ) { DBGC ( tftp, "TFTP %p received overlength DATA packet " "length %zd\n", tftp, data_len ); rc = -EINVAL; goto done; } /* Deliver data */ memset ( &meta, 0, sizeof ( meta ) ); meta.flags = XFER_FL_ABS_OFFSET; meta.offset = offset; if ( ( rc = xfer_deliver ( &tftp->xfer, iob_disown ( iobuf ), &meta ) ) != 0 ) { DBGC ( tftp, "TFTP %p could not deliver data: %s\n", tftp, strerror ( rc ) ); goto done; } /* Ensure block bitmap is ready */ if ( ( rc = tftp_presize ( tftp, ( offset + data_len ) ) ) != 0 ) goto done; /* Mark block as received */ bitmap_set ( &tftp->bitmap, block ); /* Acknowledge block */ tftp_send_packet ( tftp ); /* If all blocks have been received, finish. 
*/ if ( bitmap_full ( &tftp->bitmap ) ) tftp_done ( tftp, 0 ); done: free_iob ( iobuf ); if ( rc ) tftp_done ( tftp, rc ); return rc; } /** * Convert TFTP error code to return status code * * @v errcode TFTP error code * @ret rc Return status code */ static int tftp_errcode_to_rc ( unsigned int errcode ) { switch ( errcode ) { case TFTP_ERR_FILE_NOT_FOUND: return -ENOENT; case TFTP_ERR_ACCESS_DENIED: return -EACCES; case TFTP_ERR_ILLEGAL_OP: return -ENOTTY; default: return -ENOTSUP; } } /** * Receive ERROR * * @v tftp TFTP connection * @v buf Temporary data buffer * @v len Length of temporary data buffer * @ret rc Return status code */ static int tftp_rx_error ( struct tftp_request *tftp, void *buf, size_t len ) { struct tftp_error *error = buf; int rc; /* Sanity check */ if ( len < sizeof ( *error ) ) { DBGC ( tftp, "TFTP %p received underlength ERROR packet " "length %zd\n", tftp, len ); return -EINVAL; } DBGC ( tftp, "TFTP %p received ERROR packet with code %d, message " "\"%s\"\n", tftp, ntohs ( error->errcode ), error->errmsg ); /* Determine final operation result */ rc = tftp_errcode_to_rc ( ntohs ( error->errcode ) ); /* Close TFTP request */ tftp_done ( tftp, rc ); return 0; } /** * Receive new data * * @v tftp TFTP connection * @v iobuf I/O buffer * @v meta Transfer metadata * @ret rc Return status code */ static int tftp_rx ( struct tftp_request *tftp, struct io_buffer *iobuf, struct xfer_metadata *meta ) { struct sockaddr_tcpip *st_src; struct tftp_common *common = iobuf->data; size_t len = iob_len ( iobuf ); int rc = -EINVAL; /* Sanity checks */ if ( len < sizeof ( *common ) ) { DBGC ( tftp, "TFTP %p received underlength packet length " "%zd\n", tftp, len ); goto done; } if ( ! meta->src ) { DBGC ( tftp, "TFTP %p received packet without source port\n", tftp ); goto done; } /* Filter by TID. Set TID on first response received */ st_src = ( struct sockaddr_tcpip * ) meta->src; if ( ! tftp->peer.st_family ) { memcpy ( &tftp->peer, st_src, sizeof ( tftp->peer ) ); DBGC ( tftp, "TFTP %p using remote port %d\n", tftp, ntohs ( tftp->peer.st_port ) ); } else if ( memcmp ( &tftp->peer, st_src, sizeof ( tftp->peer ) ) != 0 ) { DBGC ( tftp, "TFTP %p received packet from wrong source (got " "%d, wanted %d)\n", tftp, ntohs ( st_src->st_port ), ntohs ( tftp->peer.st_port ) ); goto done; } switch ( common->opcode ) { case htons ( TFTP_OACK ): rc = tftp_rx_oack ( tftp, iobuf->data, len ); break; case htons ( TFTP_DATA ): rc = tftp_rx_data ( tftp, iob_disown ( iobuf ) ); break; case htons ( TFTP_ERROR ): rc = tftp_rx_error ( tftp, iobuf->data, len ); break; default: DBGC ( tftp, "TFTP %p received strange packet type %d\n", tftp, ntohs ( common->opcode ) ); break; }; done: free_iob ( iobuf ); return rc; } /** * Receive new data via socket * * @v tftp TFTP connection * @v iobuf I/O buffer * @v meta Transfer metadata * @ret rc Return status code */ static int tftp_socket_deliver ( struct tftp_request *tftp, struct io_buffer *iobuf, struct xfer_metadata *meta ) { /* Enable sending ACKs when we receive a unicast packet. This * covers three cases: * * 1. Standard TFTP; we should always send ACKs, and will * always receive a unicast packet before we need to send the * first ACK. * * 2. RFC2090 multicast TFTP; the only unicast packets we will * receive are the OACKs; enable sending ACKs here (before * processing the OACK) and disable it when processing the * multicast option if we are not the master client. * * 3. 
MTFTP; receiving a unicast datagram indicates that we * are the "master client" and should send ACKs. */ tftp->flags |= TFTP_FL_SEND_ACK; return tftp_rx ( tftp, iobuf, meta ); } /** TFTP socket operations */ static struct interface_operation tftp_socket_operations[] = { INTF_OP ( xfer_deliver, struct tftp_request *, tftp_socket_deliver ), }; /** TFTP socket interface descriptor */ static struct interface_descriptor tftp_socket_desc = INTF_DESC ( struct tftp_request, socket, tftp_socket_operations ); /** TFTP multicast socket operations */ static struct interface_operation tftp_mc_socket_operations[] = { INTF_OP ( xfer_deliver, struct tftp_request *, tftp_rx ), }; /** TFTP multicast socket interface descriptor */ static struct interface_descriptor tftp_mc_socket_desc = INTF_DESC ( struct tftp_request, mc_socket, tftp_mc_socket_operations ); /** * Check flow control window * * @v tftp TFTP connection * @ret len Length of window */ static size_t tftp_xfer_window ( struct tftp_request *tftp ) { /* We abuse this data-xfer method to convey the blocksize to * the caller. This really should be done using some kind of * stat() method, but we don't yet have the facility to do * that. */ return tftp->blksize; } /** TFTP data transfer interface operations */ static struct interface_operation tftp_xfer_operations[] = { INTF_OP ( xfer_window, struct tftp_request *, tftp_xfer_window ), INTF_OP ( intf_close, struct tftp_request *, tftp_done ), }; /** TFTP data transfer interface descriptor */ static struct interface_descriptor tftp_xfer_desc = INTF_DESC ( struct tftp_request, xfer, tftp_xfer_operations ); /** * Initiate TFTP/TFTM/MTFTP download * * @v xfer Data transfer interface * @v uri Uniform Resource Identifier * @ret rc Return status code */ static int tftp_core_open ( struct interface *xfer, struct uri *uri, unsigned int default_port, struct sockaddr *multicast, unsigned int flags ) { struct tftp_request *tftp; int rc; /* Sanity checks */ if ( ! uri->host ) return -EINVAL; if ( ! uri->path ) return -EINVAL; /* Allocate and populate TFTP structure */ tftp = zalloc ( sizeof ( *tftp ) ); if ( ! 
tftp ) return -ENOMEM; ref_init ( &tftp->refcnt, tftp_free ); intf_init ( &tftp->xfer, &tftp_xfer_desc, &tftp->refcnt ); intf_init ( &tftp->socket, &tftp_socket_desc, &tftp->refcnt ); intf_init ( &tftp->mc_socket, &tftp_mc_socket_desc, &tftp->refcnt ); timer_init ( &tftp->timer, tftp_timer_expired, &tftp->refcnt ); tftp->uri = uri_get ( uri ); tftp->blksize = TFTP_DEFAULT_BLKSIZE; tftp->flags = flags; /* Open socket */ tftp->port = uri_port ( tftp->uri, default_port ); if ( ( rc = tftp_reopen ( tftp ) ) != 0 ) goto err; /* Open multicast socket */ if ( multicast ) { if ( ( rc = tftp_reopen_mc ( tftp, multicast ) ) != 0 ) goto err; } /* Start timer to initiate RRQ */ start_timer_nodelay ( &tftp->timer ); /* Attach to parent interface, mortalise self, and return */ intf_plug_plug ( &tftp->xfer, xfer ); ref_put ( &tftp->refcnt ); return 0; err: DBGC ( tftp, "TFTP %p could not create request: %s\n", tftp, strerror ( rc ) ); tftp_done ( tftp, rc ); ref_put ( &tftp->refcnt ); return rc; } /** * Initiate TFTP download * * @v xfer Data transfer interface * @v uri Uniform Resource Identifier * @ret rc Return status code */ static int tftp_open ( struct interface *xfer, struct uri *uri ) { return tftp_core_open ( xfer, uri, TFTP_PORT, NULL, TFTP_FL_RRQ_SIZES ); } /** TFTP URI opener */ struct uri_opener tftp_uri_opener __uri_opener = { .scheme = "tftp", .open = tftp_open, }; /** * Initiate TFTP-size request * * @v xfer Data transfer interface * @v uri Uniform Resource Identifier * @ret rc Return status code */ static int tftpsize_open ( struct interface *xfer, struct uri *uri ) { return tftp_core_open ( xfer, uri, TFTP_PORT, NULL, ( TFTP_FL_RRQ_SIZES | TFTP_FL_SIZEONLY ) ); } /** TFTP URI opener */ struct uri_opener tftpsize_uri_opener __uri_opener = { .scheme = "tftpsize", .open = tftpsize_open, }; /** * Initiate TFTM download * * @v xfer Data transfer interface * @v uri Uniform Resource Identifier * @ret rc Return status code */ static int tftm_open ( struct interface *xfer, struct uri *uri ) { return tftp_core_open ( xfer, uri, TFTP_PORT, NULL, ( TFTP_FL_RRQ_SIZES | TFTP_FL_RRQ_MULTICAST ) ); } /** TFTM URI opener */ struct uri_opener tftm_uri_opener __uri_opener = { .scheme = "tftm", .open = tftm_open, }; /** * Initiate MTFTP download * * @v xfer Data transfer interface * @v uri Uniform Resource Identifier * @ret rc Return status code */ static int mtftp_open ( struct interface *xfer, struct uri *uri ) { return tftp_core_open ( xfer, uri, MTFTP_PORT, ( struct sockaddr * ) &tftp_mtftp_socket, TFTP_FL_MTFTP_RECOVERY ); } /** MTFTP URI opener */ struct uri_opener mtftp_uri_opener __uri_opener = { .scheme = "mtftp", .open = mtftp_open, }; /****************************************************************************** * * Settings * ****************************************************************************** */ /** * Apply TFTP configuration settings * * @ret rc Return status code */ static int tftp_apply_settings ( void ) { static struct in_addr tftp_server = { 0 }; struct in_addr last_tftp_server; char uri_string[32]; struct uri *uri; /* Retrieve TFTP server setting */ last_tftp_server = tftp_server; fetch_ipv4_setting ( NULL, &next_server_setting, &tftp_server ); /* If TFTP server setting has changed, set the current working * URI to match. Do it only when the TFTP server has changed * to try to minimise surprises to the user, who probably * won't expect the CWURI to change just because they updated * an unrelated setting and triggered all the settings * applicators. 
*/ if ( tftp_server.s_addr != last_tftp_server.s_addr ) { if ( tftp_server.s_addr ) { snprintf ( uri_string, sizeof ( uri_string ), "tftp://%s/", inet_ntoa ( tftp_server ) ); uri = parse_uri ( uri_string ); if ( ! uri ) return -ENOMEM; } else { uri = NULL; } churi ( uri ); uri_put ( uri ); } return 0; } /** TFTP settings applicator */ struct settings_applicator tftp_settings_applicator __settings_applicator = { .apply = tftp_apply_settings, };
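The DATA handling in tftp_rx_data() above reconstructs a full 32-bit block index from the 16-bit on-wire block number by combining it with the first gap in the receive bitmap. The snippet below is a standalone re-derivation of that arithmetic so the wraparound behaviour can be checked in isolation; it is an illustrative sketch, not iPXE code, and tftp_block_index() is a made-up name (the real logic lives inline in tftp_rx_data and also special-cases a spurious block 0 at the start of a transfer).

#include <stdint.h>
#include <stdio.h>

/* first_gap:   zero-based index of the first block not yet received
 * wire_block:  16-bit block number taken from the DATA packet (1-based) */
static uint32_t tftp_block_index ( uint32_t first_gap, uint16_t wire_block ) {
	uint32_t block;

	/* Keep only the "number of completed 16-bit wraps" part of the
	 * next expected 1-based block number. */
	block = ( ( first_gap + 1 ) & ~ ( uint32_t ) 0xffff );
	/* Add the on-wire block number, converting from 1-based to 0-based. */
	block += ( ( uint32_t ) wire_block - 1 );
	return block;
}

int main ( void ) {
	/* Start of transfer: first gap is index 0, wire block 1 => index 0 */
	printf ( "%lu\n", ( unsigned long ) tftp_block_index ( 0, 1 ) );
	/* Just past the first wrap: first gap 65536, wire block 1 => 65536 */
	printf ( "%lu\n", ( unsigned long ) tftp_block_index ( 65536, 1 ) );
	/* Deeper into the second wrap: first gap 70000, wire block 4465 => 70000 */
	printf ( "%lu\n", ( unsigned long ) tftp_block_index ( 70000, 4465 ) );
	return 0;
}

Compiled on its own, the three calls print 0, 65536 and 70000, matching the offsets (block * blksize) that the code above would compute for those packets.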
13,502
313
<gh_stars>100-1000 {"status_id":9005374858330112,"text":"Nabuti pikro wuib pa setta fordut pe ijize zauv girobfa suz nana to. #ner","user":{"user_id":921246128668672,"name":"<NAME>","screen_name":"@kep","created_at":661803003,"followers_count":23,"friends_count":11,"favourites_count":30},"created_at":814872663,"favorite_count":1,"retweet_count":301,"entities":{"hashtags":[{"text":"#ner","indices":[9,18]}]},"in_reply_to_status_id":null}
174
325
import jax import jax.numpy as jnp import numpy as np import pytest import equinox as eqx def test_is_array(getkey): objs = [ 1, 2.0, [2.0], True, object(), jnp.array([1]), jnp.array(1.0), np.array(1.0), np.array(1), eqx.nn.Linear(1, 1, key=getkey()), ] results = [False, False, False, False, False, True, True, False, False, False] for o, r in zip(objs, results): assert eqx.is_array(o) == r def test_is_array_like(getkey): objs = [ 1, 2.0, [2.0], True, object(), jnp.array([1]), jnp.array(1.0), np.array(1.0), np.array(1), eqx.nn.Linear(1, 1, key=getkey()), ] results = [True, True, False, True, False, True, True, True, True, False] for o, r in zip(objs, results): assert eqx.is_array_like(o) == r def test_is_inexact_array(getkey): objs = [ 1, 2.0, [2.0], True, object(), jnp.array([1]), jnp.array(1.0), np.array(1.0), np.array(1), eqx.nn.Linear(1, 1, key=getkey()), ] results = [False, False, False, False, False, False, True, False, False, False] for o, r in zip(objs, results): assert eqx.is_inexact_array(o) == r def test_is_inexact_array_like(getkey): objs = [ 1, 2.0, [2.0], True, object(), jnp.array([1]), jnp.array(1.0), np.array(1.0), np.array(1), eqx.nn.Linear(1, 1, key=getkey()), ] results = [False, True, False, False, False, False, True, True, False, False] for o, r in zip(objs, results): assert eqx.is_inexact_array_like(o) == r def test_filter(getkey): filter_fn = lambda x: isinstance(x, int) for pytree in ( [ 1, 2, [ 3, "hi", {"a": jnp.array(1), "b": 4, "c": eqx.nn.MLP(2, 2, 2, 2, key=getkey())}, ], ], [1, 1, 1, 1, "hi"], ): filtered = eqx.filter(pytree, filter_spec=filter_fn) for arg in jax.tree_leaves(filtered): assert isinstance(arg, int) num_int_leaves = sum( 1 for leaf in jax.tree_leaves(filtered) if isinstance(leaf, int) ) assert len(jax.tree_leaves(filtered)) == num_int_leaves filter_spec = [False, True, [filter_fn, True]] sentinel = object() pytree = [ eqx.nn.Linear(1, 1, key=getkey()), eqx.nn.Linear(1, 1, key=getkey()), [eqx.nn.Linear(1, 1, key=getkey()), sentinel], ] filtered = eqx.filter(pytree, filter_spec=filter_spec) none_linear = jax.tree_map(lambda _: None, eqx.nn.Linear(1, 1, key=getkey())) assert filtered[0] is None assert filtered[1] == pytree[1] assert filtered[2][0] == none_linear assert filtered[2][1] is sentinel with pytest.raises(ValueError): eqx.filter(pytree, filter_spec=filter_spec[1:]) def test_partition_and_combine(getkey): filter_fn = lambda x: isinstance(x, int) for pytree in ( [ 1, 2, [ 3, "hi", {"a": jnp.array(1), "b": 4, "c": eqx.nn.MLP(2, 2, 2, 2, key=getkey())}, ], ], [1, 1, 1, 1, "hi"], ): filtered, unfiltered = eqx.partition(pytree, filter_spec=filter_fn) for arg in jax.tree_leaves(filtered): assert isinstance(arg, int) for arg in jax.tree_leaves(unfiltered): assert not isinstance(arg, int) assert eqx.combine(filtered, unfiltered) == pytree assert eqx.combine(unfiltered, filtered) == pytree def test_splitfn_and_merge(getkey): filter_fn = lambda x: isinstance(x, int) for pytree in ( [ 1, 2, [ 3, "hi", {"a": jnp.array(1), "b": 4, "c": eqx.nn.MLP(2, 2, 2, 2, key=getkey())}, ], ], [1, 1, 1, 1, "hi"], ): int_args, notint_args, which, treedef = eqx.split(pytree, filter_fn=filter_fn) for arg in int_args: assert isinstance(arg, int) for arg in notint_args: assert not isinstance(arg, int) assert sum(which) == 4 re_pytree = eqx.merge(int_args, notint_args, which, treedef) assert re_pytree == pytree def test_splittree_and_merge(getkey): linear = eqx.nn.Linear(1, 1, key=getkey()) linear_tree = jax.tree_map(lambda _: True, linear) filter_tree = [ True, False, 
[False, False, {"a": True, "b": False, "c": linear_tree}], ] for i, pytree in enumerate( ( [1, 2, [3, True, {"a": jnp.array(1), "b": 4, "c": linear}]], [1, 1, [1, 1, {"a": 1, "b": 1, "c": linear}]], ) ): keep_args, notkeep_args, which, treedef = eqx.split( pytree, filter_tree=filter_tree ) if i == 0: assert set(notkeep_args) == {2, 3, True, 4} else: assert notkeep_args == [1, 1, 1, 1] assert sum(which) == 4 re_pytree = eqx.merge(keep_args, notkeep_args, which, treedef) assert re_pytree == pytree filter_tree = [True, [False, False]] pytree = [True, None] with pytest.raises(ValueError): eqx.split(pytree, filter_tree=filter_tree)
2,970
777
<reponame>google-ar/chromium<filename>components/sync/engine/attachments/attachment_store_backend.cc
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/sync/engine/attachments/attachment_store_backend.h"

#include "base/location.h"
#include "base/sequenced_task_runner.h"

namespace syncer {

AttachmentStoreBackend::AttachmentStoreBackend(
    const scoped_refptr<base::SequencedTaskRunner>& callback_task_runner)
    : callback_task_runner_(callback_task_runner) {}

AttachmentStoreBackend::~AttachmentStoreBackend() {}

void AttachmentStoreBackend::PostCallback(const base::Closure& callback) {
  callback_task_runner_->PostTask(FROM_HERE, callback);
}

}  // namespace syncer
256
708
<reponame>nairobi222/PopsTabView
package com.ccj.poptabview.listener;

import com.ccj.poptabview.base.SuperListener;

import java.util.List;

/**
 * Callback for complex filtering.
 * Created by chenchangjun on 17/7/7.
 */
public interface OnSortItemClickListener extends SuperListener {

    /**
     * Called when a filter category or store tab is clicked.
     */
    void onSortItemClick(int position, List<Integer> filterTabBeen);
}
169
1,561
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.appengine;

import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.util.store.MemoryDataStoreFactory;
import com.google.api.services.oauth2.Oauth2;
import com.google.api.services.oauth2.model.Userinfo;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;

public class Utils {

  /** Get application name from the runtime environment variable */
  static final String APP_NAME = System.getenv("GAE_APPLICATION");

  /**
   * Global instance of the {@link DataStoreFactory}. The best practice is to make it a single
   * globally shared instance across your application.
   */
  private static final MemoryDataStoreFactory DATA_STORE_FACTORY =
      MemoryDataStoreFactory.getDefaultInstance();

  /** Global instance of the HTTP transport. */
  static final HttpTransport HTTP_TRANSPORT = new NetHttpTransport();

  /** Global instance of the JSON factory. */
  static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance();

  /** Set your OAuth 2.0 Client Credentials */
  private static String CLIENT_ID = System.getenv("CLIENT_ID");

  private static String CLIENT_SECRET = System.getenv("CLIENT_SECRET");

  /** Scopes for requesting access to Google OAuth2 API */
  private static final List<String> SCOPES =
      Arrays.asList(
          "https://www.googleapis.com/auth/userinfo.profile",
          "https://www.googleapis.com/auth/userinfo.email");

  /** Returns the redirect URI for the given HTTP servlet request. */
  static String getRedirectUri(HttpServletRequest req) {
    GenericUrl url = new GenericUrl(req.getRequestURL().toString());
    url.setRawPath("/oauth2callback");
    return url.build();
  }

  // [START gae_java11_oauth2_code_flow]
  /**
   * Loads the authorization code flow to be used across all HTTP servlet requests. It is only
   * called during the first HTTP servlet request.
   */
  public static GoogleAuthorizationCodeFlow newFlow() throws IOException {
    return new GoogleAuthorizationCodeFlow.Builder(
            HTTP_TRANSPORT, JSON_FACTORY, CLIENT_ID, CLIENT_SECRET, SCOPES)
        .setDataStoreFactory(DATA_STORE_FACTORY)
        .setAccessType("offline")
        .build();
  }
  // [END gae_java11_oauth2_code_flow]

  /**
   * Returns the user ID for the given HTTP servlet request. This identifies your application's
   * user and is used to assign and persist credentials to that user. Most commonly, this will be
   * a user id stored in the session or even the session id itself.
   */
  static String getUserId(HttpServletRequest req) throws ServletException, IOException {
    return req.getSession().getId();
  }

  // [START gae_java11_oauth2_get_user]
  /** Obtain end-user authorization grant for Google APIs and return username */
  public static String getUserInfo(Credential credential) throws IOException {
    Oauth2 oauth2Client =
        new Oauth2.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential)
            .setApplicationName(APP_NAME)
            .build();

    // Retrieve user profile
    Userinfo userInfo = oauth2Client.userinfo().get().execute();
    String username = userInfo.getGivenName();
    return username;
  }
  // [END gae_java11_oauth2_get_user]
}
1,332
5,279
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.mongodb;

import java.security.KeyStore;
import java.security.cert.X509Certificate;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;

/** Utility class for registering an SSL context that accepts all certificate requests. */
@SuppressWarnings({
  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
class SSLUtils {

  /** Static trust manager that allows all requests. */
  private static final TrustManager[] trustAllCerts =
      new TrustManager[] {
        new X509TrustManager() {
          @Override
          public java.security.cert.X509Certificate[] getAcceptedIssuers() {
            return null;
          }

          @Override
          public void checkClientTrusted(X509Certificate[] certs, String authType) {}

          @Override
          public void checkServerTrusted(X509Certificate[] certs, String authType) {}
        }
      };

  /**
   * Registers an SSL context that accepts all issued certificates.
   *
   * @return SSLContext
   */
  static SSLContext ignoreSSLCertificate() {
    try {
      // Install the all-trusting trust manager
      SSLContext sc = SSLContext.getInstance("TLS");
      sc.init(null, trustAllCerts, new java.security.SecureRandom());

      KeyStore ks = KeyStore.getInstance("JKS");
      ks.load(
          SSLUtils.class.getClassLoader().getResourceAsStream("resources/.keystore"),
          "changeit".toCharArray());
      KeyManagerFactory kmf =
          KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
      kmf.init(ks, "changeit".toCharArray());

      SSLContext ctx = SSLContext.getInstance("TLS");
      ctx.init(kmf.getKeyManagers(), trustAllCerts, null);
      SSLContext.setDefault(ctx);
      return ctx;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
}
925
839
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.sts.token.provider; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Properties; import org.w3c.dom.Element; import org.apache.cxf.jaxws.context.WrappedMessageContext; import org.apache.cxf.message.MessageImpl; import org.apache.cxf.sts.STSConstants; import org.apache.cxf.sts.StaticSTSProperties; import org.apache.cxf.sts.common.CustomAttributeProvider; import org.apache.cxf.sts.common.PasswordCallbackHandler; import org.apache.cxf.sts.request.KeyRequirements; import org.apache.cxf.sts.request.TokenRequirements; import org.apache.cxf.sts.service.EncryptionProperties; import org.apache.wss4j.common.WSS4JConstants; import org.apache.wss4j.common.crypto.Crypto; import org.apache.wss4j.common.crypto.CryptoFactory; import org.apache.wss4j.common.ext.WSSecurityException; import org.apache.wss4j.common.principal.CustomTokenPrincipal; import org.apache.wss4j.common.saml.builder.SAML1Constants; import org.apache.wss4j.common.saml.builder.SAML2Constants; import org.apache.wss4j.common.util.DOM2Writer; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; /** * Some unit tests for creating custom SAML Tokens. */ public class SAMLProviderCustomTest { /** * Create a custom Saml1 Attribute Assertion. */ @org.junit.Test public void testCustomSaml1AttributeAssertion() throws Exception { TokenProvider samlTokenProvider = new SAMLTokenProvider(); TokenProviderParameters providerParameters = createProviderParameters(WSS4JConstants.WSS_SAML_TOKEN_TYPE, STSConstants.BEARER_KEY_KEYTYPE); List<AttributeStatementProvider> customProviderList = Collections.singletonList(new CustomAttributeProvider()); ((SAMLTokenProvider)samlTokenProvider).setAttributeStatementProviders(customProviderList); assertTrue(samlTokenProvider.canHandleToken(WSS4JConstants.WSS_SAML_TOKEN_TYPE)); TokenProviderResponse providerResponse = samlTokenProvider.createToken(providerParameters); assertNotNull(providerResponse); assertTrue(providerResponse.getToken() != null && providerResponse.getTokenId() != null); Element token = (Element)providerResponse.getToken(); String tokenString = DOM2Writer.nodeToString(token); assertTrue(tokenString.contains(providerResponse.getTokenId())); assertTrue(tokenString.contains("AttributeStatement")); assertFalse(tokenString.contains("AuthenticationStatement")); assertTrue(tokenString.contains("alice")); assertTrue(tokenString.contains("http://cxf.apache.org/sts/custom")); } /** * Create a custom Saml2 Authentication Assertion. 
*/ @org.junit.Test public void testCustomSaml2AuthenticationAssertion() throws Exception { TokenProvider samlTokenProvider = new SAMLTokenProvider(); TokenProviderParameters providerParameters = createProviderParameters(WSS4JConstants.WSS_SAML2_TOKEN_TYPE, STSConstants.BEARER_KEY_KEYTYPE); List<AuthenticationStatementProvider> customProviderList = Collections.singletonList( new CustomAuthenticationProvider()); ((SAMLTokenProvider)samlTokenProvider).setAuthenticationStatementProviders(customProviderList); assertTrue(samlTokenProvider.canHandleToken(WSS4JConstants.WSS_SAML2_TOKEN_TYPE)); TokenProviderResponse providerResponse = samlTokenProvider.createToken(providerParameters); assertNotNull(providerResponse); assertTrue(providerResponse.getToken() != null && providerResponse.getTokenId() != null); Element token = (Element)providerResponse.getToken(); String tokenString = DOM2Writer.nodeToString(token); assertTrue(tokenString.contains(providerResponse.getTokenId())); assertFalse(tokenString.contains("AttributeStatement")); assertTrue(tokenString.contains("AuthnStatement")); assertTrue(tokenString.contains(SAML2Constants.AUTH_CONTEXT_CLASS_REF_X509)); assertTrue(tokenString.contains("alice")); } /** * Create a custom Saml1 Authentication Assertion. */ @org.junit.Test public void testCustomSaml1AuthenticationAssertion() throws Exception { TokenProvider samlTokenProvider = new SAMLTokenProvider(); TokenProviderParameters providerParameters = createProviderParameters(WSS4JConstants.WSS_SAML_TOKEN_TYPE, STSConstants.BEARER_KEY_KEYTYPE); List<AuthenticationStatementProvider> customProviderList = Collections.singletonList( new CustomAuthenticationProvider()); ((SAMLTokenProvider)samlTokenProvider).setAuthenticationStatementProviders(customProviderList); assertTrue(samlTokenProvider.canHandleToken(WSS4JConstants.WSS_SAML_TOKEN_TYPE)); TokenProviderResponse providerResponse = samlTokenProvider.createToken(providerParameters); assertNotNull(providerResponse); assertTrue(providerResponse.getToken() != null && providerResponse.getTokenId() != null); Element token = (Element)providerResponse.getToken(); String tokenString = DOM2Writer.nodeToString(token); assertTrue(tokenString.contains(providerResponse.getTokenId())); assertFalse(tokenString.contains("AttributeStatement")); assertTrue(tokenString.contains("AuthenticationStatement")); assertTrue(tokenString.contains(SAML1Constants.AUTH_METHOD_X509)); assertTrue(tokenString.contains("alice")); } /** * Create a custom Saml2 Authentication and Attribute Assertion. 
*/ @org.junit.Test public void testCustomSaml2CombinedAssertion() throws Exception { TokenProvider samlTokenProvider = new SAMLTokenProvider(); TokenProviderParameters providerParameters = createProviderParameters(WSS4JConstants.WSS_SAML2_TOKEN_TYPE, STSConstants.BEARER_KEY_KEYTYPE); List<AuthenticationStatementProvider> customProviderList = Collections.singletonList( new CustomAuthenticationProvider()); ((SAMLTokenProvider)samlTokenProvider).setAuthenticationStatementProviders(customProviderList); List<AttributeStatementProvider> customAttributeProviderList = Collections.singletonList( new CustomAttributeProvider()); ((SAMLTokenProvider)samlTokenProvider).setAttributeStatementProviders(customAttributeProviderList); assertTrue(samlTokenProvider.canHandleToken(WSS4JConstants.WSS_SAML2_TOKEN_TYPE)); TokenProviderResponse providerResponse = samlTokenProvider.createToken(providerParameters); assertNotNull(providerResponse); assertTrue(providerResponse.getToken() != null && providerResponse.getTokenId() != null); Element token = (Element)providerResponse.getToken(); String tokenString = DOM2Writer.nodeToString(token); assertTrue(tokenString.contains(providerResponse.getTokenId())); assertTrue(tokenString.contains("AttributeStatement")); assertTrue(tokenString.contains("AuthnStatement")); assertTrue(tokenString.contains("alice")); } /** * Create a custom Saml1 (Multiple) Attribute Assertion. */ @org.junit.Test public void testCustomSaml1MultipleAssertion() throws Exception { TokenProvider samlTokenProvider = new SAMLTokenProvider(); TokenProviderParameters providerParameters = createProviderParameters(WSS4JConstants.WSS_SAML_TOKEN_TYPE, STSConstants.BEARER_KEY_KEYTYPE); List<AttributeStatementProvider> customProviderList = Arrays.asList( new CustomAttributeProvider(), new CustomAttributeProvider()); ((SAMLTokenProvider)samlTokenProvider).setAttributeStatementProviders(customProviderList); assertTrue(samlTokenProvider.canHandleToken(WSS4JConstants.WSS_SAML_TOKEN_TYPE)); TokenProviderResponse providerResponse = samlTokenProvider.createToken(providerParameters); assertNotNull(providerResponse); assertTrue(providerResponse.getToken() != null && providerResponse.getTokenId() != null); Element token = (Element)providerResponse.getToken(); String tokenString = DOM2Writer.nodeToString(token); assertTrue(tokenString.contains(providerResponse.getTokenId())); assertTrue(tokenString.contains("AttributeStatement")); assertFalse(tokenString.contains("AuthenticationStatement")); assertTrue(tokenString.contains("alice")); assertTrue(tokenString.contains("http://cxf.apache.org/sts/custom")); } /** * Create a custom Saml2 AuthDecision Assertion. 
*/ @org.junit.Test public void testCustomSaml2AuthDecisionAssertion() throws Exception { TokenProvider samlTokenProvider = new SAMLTokenProvider(); TokenProviderParameters providerParameters = createProviderParameters(WSS4JConstants.WSS_SAML2_TOKEN_TYPE, STSConstants.BEARER_KEY_KEYTYPE); List<AuthDecisionStatementProvider> customProviderList = Collections.singletonList( new CustomAuthDecisionProvider()); ((SAMLTokenProvider)samlTokenProvider).setAuthDecisionStatementProviders(customProviderList); assertTrue(samlTokenProvider.canHandleToken(WSS4JConstants.WSS_SAML2_TOKEN_TYPE)); TokenProviderResponse providerResponse = samlTokenProvider.createToken(providerParameters); assertNotNull(providerResponse); assertTrue(providerResponse.getToken() != null && providerResponse.getTokenId() != null); Element token = (Element)providerResponse.getToken(); String tokenString = DOM2Writer.nodeToString(token); assertTrue(tokenString.contains(providerResponse.getTokenId())); assertFalse(tokenString.contains("AttributeStatement")); assertFalse(tokenString.contains("AuthnStatement")); assertTrue(tokenString.contains("AuthzDecisionStatement")); assertTrue(tokenString.contains("alice")); } /** * Create a Saml1 Attribute Assertion with a custom Subject */ @org.junit.Test public void testCustomSaml1SubjectAssertion() throws Exception { TokenProvider samlTokenProvider = new SAMLTokenProvider(); TokenProviderParameters providerParameters = createProviderParameters(WSS4JConstants.WSS_SAML_TOKEN_TYPE, STSConstants.BEARER_KEY_KEYTYPE); ((SAMLTokenProvider)samlTokenProvider).setSubjectProvider(new CustomSubjectProvider()); assertTrue(samlTokenProvider.canHandleToken(WSS4JConstants.WSS_SAML_TOKEN_TYPE)); TokenProviderResponse providerResponse = samlTokenProvider.createToken(providerParameters); assertNotNull(providerResponse); assertTrue(providerResponse.getToken() != null && providerResponse.getTokenId() != null); Element token = (Element)providerResponse.getToken(); String tokenString = DOM2Writer.nodeToString(token); assertTrue(tokenString.contains(providerResponse.getTokenId())); assertTrue(tokenString.contains("AttributeStatement")); assertFalse(tokenString.contains("AuthenticationStatement")); assertTrue(tokenString.contains("alice")); assertTrue(tokenString.contains("http://cxf.apache.org/sts/custom")); } /** * Create a Saml1 Assertion with a custom NameID Format of the Subject */ @org.junit.Test public void testCustomSaml1SubjectNameIDFormat() throws Exception { TokenProvider samlTokenProvider = new SAMLTokenProvider(); TokenProviderParameters providerParameters = createProviderParameters(WSS4JConstants.WSS_SAML_TOKEN_TYPE, STSConstants.BEARER_KEY_KEYTYPE); DefaultSubjectProvider subjectProvider = new DefaultSubjectProvider(); subjectProvider.setSubjectNameIDFormat(SAML1Constants.NAMEID_FORMAT_EMAIL_ADDRESS); ((SAMLTokenProvider)samlTokenProvider).setSubjectProvider(subjectProvider); assertTrue(samlTokenProvider.canHandleToken(WSS4JConstants.WSS_SAML_TOKEN_TYPE)); TokenProviderResponse providerResponse = samlTokenProvider.createToken(providerParameters); assertNotNull(providerResponse); assertTrue(providerResponse.getToken() != null && providerResponse.getTokenId() != null); Element token = (Element)providerResponse.getToken(); String tokenString = DOM2Writer.nodeToString(token); assertTrue(tokenString.contains(providerResponse.getTokenId())); assertTrue(tokenString.contains("AttributeStatement")); assertFalse(tokenString.contains("AuthenticationStatement")); assertTrue(tokenString.contains("alice")); 
assertTrue(tokenString.contains(SAML1Constants.NAMEID_FORMAT_EMAIL_ADDRESS)); } private TokenProviderParameters createProviderParameters( String tokenType, String keyType ) throws WSSecurityException { TokenProviderParameters parameters = new TokenProviderParameters(); TokenRequirements tokenRequirements = new TokenRequirements(); tokenRequirements.setTokenType(tokenType); parameters.setTokenRequirements(tokenRequirements); KeyRequirements keyRequirements = new KeyRequirements(); keyRequirements.setKeyType(keyType); parameters.setKeyRequirements(keyRequirements); parameters.setPrincipal(new CustomTokenPrincipal("alice")); // Mock up message context MessageImpl msg = new MessageImpl(); WrappedMessageContext msgCtx = new WrappedMessageContext(msg); parameters.setMessageContext(msgCtx); parameters.setAppliesToAddress("http://dummy-service.com/dummy"); // Add STSProperties object StaticSTSProperties stsProperties = new StaticSTSProperties(); Crypto crypto = CryptoFactory.getInstance(getEncryptionProperties()); stsProperties.setEncryptionCrypto(crypto); stsProperties.setSignatureCrypto(crypto); stsProperties.setEncryptionUsername("myservicekey"); stsProperties.setSignatureUsername("mystskey"); stsProperties.setCallbackHandler(new PasswordCallbackHandler()); stsProperties.setIssuer("STS"); parameters.setStsProperties(stsProperties); parameters.setEncryptionProperties(new EncryptionProperties()); return parameters; } private Properties getEncryptionProperties() { Properties properties = new Properties(); properties.put( "org.apache.wss4j.crypto.provider", "org.apache.wss4j.common.crypto.Merlin" ); properties.put("org.apache.wss4j.crypto.merlin.keystore.password", "<PASSWORD>"); properties.put("org.apache.wss4j.crypto.merlin.keystore.file", "keys/stsstore.jks"); return properties; } }
5,361
721
<reponame>FinalCraftMC/EnderIO
package crazypants.enderio.integration.forestry.upgrades;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.enderio.core.common.mixin.SimpleMixin;

import crazypants.enderio.api.upgrades.IDarkSteelItem;
import crazypants.enderio.base.item.darksteel.ItemDarkSteelArmor;
import forestry.api.apiculture.IArmorApiarist;
import forestry.api.core.IArmorNaturalist;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.EntityEquipmentSlot;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;

@SimpleMixin(dependencies = "forestry", value = ItemDarkSteelArmor.class)
public abstract class ArmorMixin extends Item implements IArmorApiarist, IArmorNaturalist, IDarkSteelItem {

  @Override
  public boolean canSeePollination(@Nonnull EntityPlayer player, @Nonnull ItemStack armor, boolean doSee) {
    return isForSlot(EntityEquipmentSlot.HEAD)
        && NaturalistEyeUpgrade.INSTANCE.hasUpgrade(player.getItemStackFromSlot(EntityEquipmentSlot.HEAD));
  }

  @Override
  public boolean protectEntity(@Nonnull EntityLivingBase entity, @Nonnull ItemStack armor, @Nullable String cause,
      boolean doProtect) {
    return ApiaristArmorUpgrade.HELMET.hasUpgrade(armor) || ApiaristArmorUpgrade.CHEST.hasUpgrade(armor)
        || ApiaristArmorUpgrade.BOOTS.hasUpgrade(armor) || ApiaristArmorUpgrade.LEGS.hasUpgrade(armor);
  }

}
446
361
from linear_attention_transformer.linear_attention_transformer import LinearAttentionTransformer, LinearAttentionTransformerLM, LinformerSettings, LinformerContextSettings
from linear_attention_transformer.autoregressive_wrapper import AutoregressiveWrapper
from linear_attention_transformer.images import ImageLinearAttention
77
2,996
<reponame>Elyahu41/Terasology<filename>engine/src/main/java/org/terasology/engine/logic/behavior/DefaultCollectiveBehaviorTreeRunner.java
// Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.logic.behavior;

import org.terasology.engine.logic.behavior.asset.BehaviorTree;
import org.terasology.engine.logic.behavior.core.Actor;
import org.terasology.engine.logic.behavior.core.BehaviorNode;
import org.terasology.engine.logic.behavior.core.BehaviorState;
import org.terasology.engine.logic.behavior.core.CollectiveBehaviorTreeRunner;
import org.terasology.engine.logic.behavior.core.DelegateNode;

import java.util.Set;

/**
 * Tree runner, that runs the given tree for a group of actors.
 */
public class DefaultCollectiveBehaviorTreeRunner implements CollectiveBehaviorTreeRunner {
    private final BehaviorTree tree;
    private final BehaviorNode root;

    private Callback callback;

    private Set<Actor> actors;
    private BehaviorState state = BehaviorState.UNDEFINED;

    public DefaultCollectiveBehaviorTreeRunner(BehaviorNode node, Set<Actor> actors) {
        this.tree = null;
        this.root = node.deepCopy();
        this.actors = actors;
    }

    public DefaultCollectiveBehaviorTreeRunner(BehaviorTree tree, Set<Actor> actors, Callback callback) {
        this.callback = callback;
        this.tree = tree;
        this.root = injectDelegates(tree.getRoot().deepCopy(), tree.getRoot());
        this.actors = actors;
    }

    private BehaviorNode injectDelegates(BehaviorNode newNode, BehaviorNode treeNode) {
        if (newNode.getChildrenCount() == 0) {
            return createCallbackNode(newNode, treeNode);
        } else {
            for (int i = 0; i < newNode.getChildrenCount(); i++) {
                newNode.replaceChild(i, injectDelegates(newNode.getChild(i), treeNode.getChild(i)));
            }
            return createCallbackNode(newNode, treeNode);
        }
    }

    private DelegateNode createCallbackNode(BehaviorNode newNode, final BehaviorNode treeNode) {
        return new DelegateNode(newNode) {
            @Override
            public BehaviorState execute(Actor theActor) {
                BehaviorState result = super.execute(theActor);
                if (callback != null) {
                    callback.afterExecute(treeNode, result);
                }
                return result;
            }
        };
    }

    @Override
    public BehaviorTree getTree() {
        return tree;
    }

    @Override
    public BehaviorState step() {
        for (Actor actor : this.actors) {
            if (state != BehaviorState.RUNNING) {
                root.construct(actor);
            }

            state = root.execute(actor);

            if (state != BehaviorState.RUNNING) {
                root.destruct(actor);
            }
        }
        return state;
    }

    @Override
    public Set<Actor> getActors() {
        return actors;
    }

    @Override
    public void setActors(Set<Actor> actors) {
        this.actors = actors;
    }
}
1,240
1,122
<gh_stars>1000+
// Copyright (c) Microsoft Corporation
// SPDX-License-Identifier: MIT
#pragma once

#include "ebpf_platform.h"
#include "ebpf_structs.h"
#include "framework.h"

#ifdef __cplusplus
extern "C"
{
#endif

    typedef enum _ebpf_object_type
    {
        EBPF_OBJECT_UNKNOWN,
        EBPF_OBJECT_MAP,
        EBPF_OBJECT_LINK,
        EBPF_OBJECT_PROGRAM,
    } ebpf_object_type_t;

    typedef struct _ebpf_object ebpf_object_t;
    typedef void (*ebpf_free_object_t)(ebpf_object_t* object);
    typedef const ebpf_program_type_t* (*ebpf_object_get_program_type_t)(_In_ const ebpf_object_t* object);

    // This type probably ought to be renamed to avoid confusion with
    // ebpf_object_t in libs\api\api_internal.h
    typedef struct _ebpf_object
    {
        uint32_t marker;
        volatile int32_t reference_count;
        ebpf_object_type_t type;
        ebpf_free_object_t free_function;
        ebpf_object_get_program_type_t get_program_type;
        // ID for this object.
        ebpf_id_t id;
        // Used to insert object in an object specific list.
        ebpf_list_entry_t object_list_entry;
        // # of pinned paths, for diagnostic purposes.
        uint32_t pinned_path_count;
    } ebpf_object_t;

    /**
     * @brief Initiate object tracking.
     */
    void
    ebpf_object_tracking_initiate();

    /**
     * @brief Terminate object tracking.
     */
    void
    ebpf_object_tracking_terminate();

    /**
     * @brief Initialize an ebpf_object_t structure.
     *
     * @param[in,out] object ebpf_object_t structure to initialize.
     * @param[in] object_type The type of the object.
     * @param[in] free_function The function used to free the object.
     * @param[in] get_program_type_function The function used to get a program type, or NULL. Each program
     * has a program type, and hence so do maps that can contain programs, whether directly (like
     * BPF_MAP_TYPE_PROG_ARRAY) or indirectly (like BPF_MAP_TYPE_ARRAY_OF_MAPS containing a BPF_MAP_TYPE_PROG_ARRAY).
     * @retval EBPF_SUCCESS Initialization succeeded.
     * @retval EBPF_NO_MEMORY Could not insert into the tracking table.
     */
    ebpf_result_t
    ebpf_object_initialize(
        ebpf_object_t* object,
        ebpf_object_type_t object_type,
        ebpf_free_object_t free_function,
        ebpf_object_get_program_type_t get_program_type_function);

    /**
     * @brief Acquire a reference to this object.
     *
     * @param[in] object Object on which to acquire a reference.
     */
    void
    ebpf_object_acquire_reference(ebpf_object_t* object);

    /**
     * @brief Release a reference on this object. If the reference count reaches
     * zero, the free_function is invoked on the object.
     *
     * @param[in] object Object on which to release a reference.
     */
    void
    ebpf_object_release_reference(ebpf_object_t* object);

    /**
     * @brief Query the stored type of the object.
     *
     * @param[in] object Object to be queried.
     * @return Type of the object.
     */
    ebpf_object_type_t
    ebpf_object_get_type(ebpf_object_t* object);

    /**
     * @brief Find the next object that is of this type and acquire a reference
     * on it.
     *
     * @param[in] previous_object Previous object that was found. Can be NULL
     * to find the first object.
     * @param[in] type Type of object to find.
     * @param[out] next_object Pointer to memory containing the next object or
     * NULL if there are no more objects of that type.
     */
    void
    ebpf_object_reference_next_object(
        ebpf_object_t* previous_object, ebpf_object_type_t type, ebpf_object_t** next_object);

    /**
     * @brief Find an ID in the ID table, verify the type matches,
     * acquire a reference to the object and return it.
     *
     * @param[in] id ID to find in table.
     * @param[in] object_type Object type to match.
     * @param[out] object Pointer to memory that contains the object on success.
     * @retval EBPF_SUCCESS The operation was successful.
     * @retval EBPF_KEY_NOT_FOUND The provided ID is not valid.
     */
    ebpf_result_t
    ebpf_object_reference_by_id(ebpf_id_t id, ebpf_object_type_t object_type, _Outptr_ ebpf_object_t** object);

    /**
     * @brief Find an ID in the ID table, verify the type matches,
     * and release a reference previously acquired via
     * ebpf_object_reference_by_id.
     *
     * @param[in] id ID to find in table.
     * @param[in] object_type Object type to match.
     * @retval EBPF_SUCCESS The operation was successful.
     * @retval EBPF_KEY_NOT_FOUND The provided ID is not valid.
     */
    ebpf_result_t
    ebpf_object_dereference_by_id(ebpf_id_t id, ebpf_object_type_t object_type);

    /**
     * @brief Find the object of a given type with the next ID greater than a given ID.
     *
     * @param[in] start_id ID to look for an ID after. The start_id
     * need not exist.
     * @retval EBPF_SUCCESS The operation was successful.
     * @retval EBPF_NO_MORE_KEYS No such IDs found.
     */
    ebpf_result_t
    ebpf_object_get_next_id(ebpf_id_t start_id, ebpf_object_type_t object_type, _Out_ ebpf_id_t* next_id);

#ifdef __cplusplus
}
#endif
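The header above only declares the reference-counting API, so a short usage sketch may help: a container type embeds ebpf_object_t as its first member and hands ebpf_object_initialize() a free callback. Everything named my_* below is hypothetical and not ebpf-for-windows source; plain calloc/free stand in for whatever allocator the real code uses, and the sketch assumes the header above (and its includes) are available.

/* Hypothetical container object managed by the generic reference counting. */
#include <stdlib.h>

typedef struct _my_map
{
    ebpf_object_t object; /* first member, so the object pointer aliases the container */
    size_t value_size;
} my_map_t;

/* Free callback invoked by ebpf_object_release_reference() when the count hits zero. */
static void
my_map_free(ebpf_object_t* object)
{
    my_map_t* map = (my_map_t*)object;
    free(map);
}

static ebpf_result_t
my_map_create(size_t value_size, my_map_t** map)
{
    my_map_t* new_map = calloc(1, sizeof(*new_map));
    if (!new_map)
        return EBPF_NO_MEMORY;

    new_map->value_size = value_size;

    /* Per the doc comment above, this can fail with EBPF_NO_MEMORY if the
     * object cannot be inserted into the tracking table. */
    ebpf_result_t result =
        ebpf_object_initialize(&new_map->object, EBPF_OBJECT_MAP, my_map_free, NULL /* no program type */);
    if (result != EBPF_SUCCESS) {
        free(new_map);
        return result;
    }

    *map = new_map;
    return EBPF_SUCCESS;
}

Callers then pair every ebpf_object_acquire_reference(&map->object) with an ebpf_object_release_reference(&map->object); once the reference count reaches zero, my_map_free() runs, as described in the release function's documentation.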
2,176
8,027
import unittest


class Test(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        raise Exception("setup failure!")

    def test_that_passes(self):
        pass
74
521
/* $Id: pkix-verify.cpp $ */ /** @file * IPRT - Crypto - Public Key Infrastructure API, Verification. */ /* * Copyright (C) 2006-2017 Oracle Corporation * * This file is part of VirtualBox Open Source Edition (OSE), as * available from http://www.virtualbox.org. This file is free software; * you can redistribute it and/or modify it under the terms of the GNU * General Public License (GPL) as published by the Free Software * Foundation, in version 2 as it comes in the "COPYING" file of the * VirtualBox OSE distribution. VirtualBox OSE is distributed in the * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind. * * The contents of this file may alternatively be used under the terms * of the Common Development and Distribution License Version 1.0 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the * VirtualBox OSE distribution, in which case the provisions of the * CDDL are applicable instead of those of the GPL. * * You may elect to license modified versions of this file under the * terms and conditions of either the GPL or the CDDL or both. */ /********************************************************************************************************************************* * Header Files * *********************************************************************************************************************************/ #include "internal/iprt.h" #include <iprt/crypto/pkix.h> #include <iprt/err.h> #include <iprt/string.h> #include <iprt/crypto/digest.h> #ifdef IPRT_WITH_OPENSSL # include "internal/iprt-openssl.h" # include "openssl/evp.h" # ifndef OPENSSL_VERSION_NUMBER # error "Missing OPENSSL_VERSION_NUMBER!" # endif #endif RTDECL(int) RTCrPkixPubKeyVerifySignature(PCRTASN1OBJID pAlgorithm, PCRTASN1DYNTYPE pParameters, PCRTASN1BITSTRING pPublicKey, PCRTASN1BITSTRING pSignatureValue, const void *pvData, size_t cbData, PRTERRINFO pErrInfo) { /* * Valid input. */ AssertPtrReturn(pAlgorithm, VERR_INVALID_POINTER); AssertReturn(RTAsn1ObjId_IsPresent(pAlgorithm), VERR_INVALID_POINTER); if (pParameters) { AssertPtrReturn(pParameters, VERR_INVALID_POINTER); if (pParameters->enmType == RTASN1TYPE_NULL) pParameters = NULL; } AssertPtrReturn(pPublicKey, VERR_INVALID_POINTER); AssertReturn(RTAsn1BitString_IsPresent(pPublicKey), VERR_INVALID_POINTER); AssertPtrReturn(pSignatureValue, VERR_INVALID_POINTER); AssertReturn(RTAsn1BitString_IsPresent(pSignatureValue), VERR_INVALID_POINTER); AssertPtrReturn(pvData, VERR_INVALID_POINTER); AssertReturn(cbData > 0, VERR_INVALID_PARAMETER); /* * Parameters are not currently supported (openssl code path). */ if (pParameters) return RTErrInfoSet(pErrInfo, VERR_CR_PKIX_CIPHER_ALGO_PARAMS_NOT_IMPL, "Cipher algorithm parameters are not yet supported."); /* * Validate using IPRT. */ RTCRPKIXSIGNATURE hSignature; int rcIprt = RTCrPkixSignatureCreateByObjId(&hSignature, pAlgorithm, false /*fSigning*/, pPublicKey, pParameters); if (RT_FAILURE(rcIprt)) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_CIPHER_ALGO_NOT_KNOWN, "Unknown public key algorithm [IPRT]: %s", pAlgorithm->szObjId); RTCRDIGEST hDigest; rcIprt = RTCrDigestCreateByObjId(&hDigest, pAlgorithm); if (RT_SUCCESS(rcIprt)) { /* Calculate the digest. 
*/ rcIprt = RTCrDigestUpdate(hDigest, pvData, cbData); if (RT_SUCCESS(rcIprt)) { rcIprt = RTCrPkixSignatureVerifyBitString(hSignature, hDigest, pSignatureValue); if (RT_FAILURE(rcIprt)) RTErrInfoSet(pErrInfo, rcIprt, "RTCrPkixSignatureVerifyBitString failed"); } else RTErrInfoSet(pErrInfo, rcIprt, "RTCrDigestUpdate failed"); RTCrDigestRelease(hDigest); } else RTErrInfoSetF(pErrInfo, rcIprt, "Unknown digest algorithm [IPRT]: %s", pAlgorithm->szObjId); RTCrPkixSignatureRelease(hSignature); #ifdef IPRT_WITH_OPENSSL /* * Validate using OpenSSL EVP. */ rtCrOpenSslInit(); /* Translate the algorithm ID into a EVP message digest type pointer. */ int iAlgoNid = OBJ_txt2nid(pAlgorithm->szObjId); if (iAlgoNid == NID_undef) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALGO_NOT_KNOWN, "Unknown public key algorithm [OpenSSL]: %s", pAlgorithm->szObjId); const char *pszAlgoSn = OBJ_nid2sn(iAlgoNid); # if OPENSSL_VERSION_NUMBER >= 0x10001000 && !defined(LIBRESSL_VERSION_NUMBER) int idAlgoPkey = 0; int idAlgoMd = 0; if (!OBJ_find_sigid_algs(iAlgoNid, &idAlgoMd, &idAlgoPkey)) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALGO_NOT_KNOWN_EVP, "OBJ_find_sigid_algs failed on %u (%s, %s)", iAlgoNid, pszAlgoSn, pAlgorithm->szObjId); const EVP_MD *pEvpMdType = EVP_get_digestbynid(idAlgoMd); if (!pEvpMdType) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALGO_NOT_KNOWN_EVP, "EVP_get_digestbynid failed on %d (%s, %s)", idAlgoMd, pszAlgoSn, pAlgorithm->szObjId); # else const EVP_MD *pEvpMdType = EVP_get_digestbyname(pszAlgoSn); if (!pEvpMdType) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALGO_NOT_KNOWN_EVP, "EVP_get_digestbyname failed on %s (%s)", pszAlgoSn, pAlgorithm->szObjId); # endif EVP_MD_CTX *pEvpMdCtx = EVP_MD_CTX_create(); if (!pEvpMdCtx) return RTErrInfoSetF(pErrInfo, VERR_NO_MEMORY, "EVP_MD_CTX_create failed"); int rcOssl; if (EVP_VerifyInit_ex(pEvpMdCtx, pEvpMdType, NULL /*engine*/)) { /* Create an EVP public key. */ EVP_PKEY *pEvpPublicKey = EVP_PKEY_new(); if (pEvpPublicKey) { # if OPENSSL_VERSION_NUMBER >= 0x10001000 && !defined(LIBRESSL_VERSION_NUMBER) if (EVP_PKEY_set_type(pEvpPublicKey, idAlgoPkey)) { int idKeyType = EVP_PKEY_base_id(pEvpPublicKey); # else int idKeyType = pEvpPublicKey->type = EVP_PKEY_type(pEvpMdType->required_pkey_type[0]); # endif if (idKeyType != NID_undef) { const unsigned char *puchPublicKey = RTASN1BITSTRING_GET_BIT0_PTR(pPublicKey); if (d2i_PublicKey(idKeyType, &pEvpPublicKey, &puchPublicKey, RTASN1BITSTRING_GET_BYTE_SIZE(pPublicKey))) { /* Digest the data. */ EVP_VerifyUpdate(pEvpMdCtx, pvData, cbData); /* Verify the signature. 
*/ if (EVP_VerifyFinal(pEvpMdCtx, RTASN1BITSTRING_GET_BIT0_PTR(pSignatureValue), RTASN1BITSTRING_GET_BYTE_SIZE(pSignatureValue), pEvpPublicKey) > 0) rcOssl = VINF_SUCCESS; else rcOssl = RTErrInfoSet(pErrInfo, VERR_CR_PKIX_OSSL_VERIFY_FINAL_FAILED, "EVP_VerifyFinal failed"); } else rcOssl = RTErrInfoSet(pErrInfo, VERR_CR_PKIX_OSSL_D2I_PUBLIC_KEY_FAILED, "d2i_PublicKey failed"); } else # if OPENSSL_VERSION_NUMBER < 0x10001000 || defined(LIBRESSL_VERSION_NUMBER) rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_type() failed"); # else rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_base_id() failed"); } else rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_set_type(%u) failed (sig algo %s)", idAlgoPkey, pszAlgoSn); # endif /* Cleanup and return.*/ EVP_PKEY_free(pEvpPublicKey); } else rcOssl = RTErrInfoSetF(pErrInfo, VERR_NO_MEMORY, "EVP_PKEY_new(%d) failed", iAlgoNid); } else rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALOG_INIT_FAILED, "EVP_VerifyInit_ex failed (algorithm type is %s / %s)", pszAlgoSn, pAlgorithm->szObjId); EVP_MD_CTX_destroy(pEvpMdCtx); /* * Check the result. */ if (RT_SUCCESS(rcIprt) && RT_SUCCESS(rcOssl)) return VINF_SUCCESS; if (RT_FAILURE_NP(rcIprt) && RT_FAILURE_NP(rcOssl)) return rcIprt; AssertMsgFailed(("rcIprt=%Rrc rcOssl=%Rrc\n", rcIprt, rcOssl)); if (RT_FAILURE_NP(rcOssl)) return rcOssl; #endif /* IPRT_WITH_OPENSSL */ return rcIprt; } RTDECL(int) RTCrPkixPubKeyVerifySignedDigest(PCRTASN1OBJID pAlgorithm, PCRTASN1DYNTYPE pParameters, PCRTASN1BITSTRING pPublicKey, void const *pvSignedDigest, size_t cbSignedDigest, RTCRDIGEST hDigest, PRTERRINFO pErrInfo) { /* * Valid input. */ AssertPtrReturn(pAlgorithm, VERR_INVALID_POINTER); AssertReturn(RTAsn1ObjId_IsPresent(pAlgorithm), VERR_INVALID_POINTER); if (pParameters) { AssertPtrReturn(pParameters, VERR_INVALID_POINTER); if (pParameters->enmType == RTASN1TYPE_NULL) pParameters = NULL; } AssertPtrReturn(pPublicKey, VERR_INVALID_POINTER); AssertReturn(RTAsn1BitString_IsPresent(pPublicKey), VERR_INVALID_POINTER); AssertPtrReturn(pvSignedDigest, VERR_INVALID_POINTER); AssertReturn(cbSignedDigest, VERR_INVALID_PARAMETER); AssertPtrReturn(hDigest, VERR_INVALID_HANDLE); /* * Parameters are not currently supported (openssl code path). */ if (pParameters) return RTErrInfoSet(pErrInfo, VERR_CR_PKIX_CIPHER_ALGO_PARAMS_NOT_IMPL, "Cipher algorithm parameters are not yet supported."); /* * Validate using IPRT. */ RTCRPKIXSIGNATURE hSignature; int rcIprt = RTCrPkixSignatureCreateByObjId(&hSignature, pAlgorithm, false /*fSigning*/, pPublicKey, pParameters); if (RT_FAILURE(rcIprt)) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_CIPHER_ALGO_NOT_KNOWN, "Unknown public key algorithm [IPRT]: %s", pAlgorithm->szObjId); rcIprt = RTCrPkixSignatureVerify(hSignature, hDigest, pvSignedDigest, cbSignedDigest); if (RT_FAILURE(rcIprt)) RTErrInfoSet(pErrInfo, rcIprt, "RTCrPkixSignatureVerifyBitString failed"); RTCrPkixSignatureRelease(hSignature); #if defined(IPRT_WITH_OPENSSL) \ && (OPENSSL_VERSION_NUMBER > 0x10000000L) /* 0.9.8 doesn't seem to have EVP_PKEY_CTX_set_signature_md. */ /* * Validate using OpenSSL EVP. 
*/ rtCrOpenSslInit(); const char *pszAlgObjId = pAlgorithm->szObjId; if (!strcmp(pszAlgObjId, RTCRX509ALGORITHMIDENTIFIERID_RSA)) { pszAlgObjId = RTCrX509AlgorithmIdentifier_CombineEncryptionOidAndDigestOid(pszAlgObjId, RTCrDigestGetAlgorithmOid(hDigest)); AssertMsgStmt(pszAlgObjId, ("enc=%s hash=%s\n", pAlgorithm->szObjId, RTCrDigestGetAlgorithmOid(hDigest)), pszAlgObjId = RTCrDigestGetAlgorithmOid(hDigest)); } /* Translate the algorithm ID into a EVP message digest type pointer. */ int iAlgoNid = OBJ_txt2nid(pszAlgObjId); if (iAlgoNid == NID_undef) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALGO_NOT_KNOWN, "Unknown public key algorithm [OpenSSL]: %s", pszAlgObjId); const char *pszAlgoSn = OBJ_nid2sn(iAlgoNid); # if OPENSSL_VERSION_NUMBER >= 0x10001000 && !defined(LIBRESSL_VERSION_NUMBER) int idAlgoPkey = 0; int idAlgoMd = 0; if (!OBJ_find_sigid_algs(iAlgoNid, &idAlgoMd, &idAlgoPkey)) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALGO_NOT_KNOWN_EVP, "OBJ_find_sigid_algs failed on %u (%s, %s)", iAlgoNid, pszAlgoSn, pAlgorithm->szObjId); const EVP_MD *pEvpMdType = EVP_get_digestbynid(idAlgoMd); if (!pEvpMdType) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALGO_NOT_KNOWN_EVP, "EVP_get_digestbynid failed on %d (%s, %s)", idAlgoMd, pszAlgoSn, pAlgorithm->szObjId); # else const EVP_MD *pEvpMdType = EVP_get_digestbyname(pszAlgoSn); if (!pEvpMdType) return RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_CIPHER_ALGO_NOT_KNOWN_EVP, "EVP_get_digestbyname failed on %s (%s)", pszAlgoSn, pszAlgObjId); # endif /* Create an EVP public key. */ int rcOssl; EVP_PKEY *pEvpPublicKey = EVP_PKEY_new(); if (pEvpPublicKey) { # if OPENSSL_VERSION_NUMBER >= 0x10001000 && !defined(LIBRESSL_VERSION_NUMBER) if (EVP_PKEY_set_type(pEvpPublicKey, idAlgoPkey)) { int idKeyType = EVP_PKEY_base_id(pEvpPublicKey); # else int idKeyType = pEvpPublicKey->type = EVP_PKEY_type(pEvpMdType->required_pkey_type[0]); # endif if (idKeyType != NID_undef) { const unsigned char *puchPublicKey = RTASN1BITSTRING_GET_BIT0_PTR(pPublicKey); if (d2i_PublicKey(idKeyType, &pEvpPublicKey, &puchPublicKey, RTASN1BITSTRING_GET_BYTE_SIZE(pPublicKey))) { /* Create an EVP public key context we can use to validate the digest. */ EVP_PKEY_CTX *pEvpPKeyCtx = EVP_PKEY_CTX_new(pEvpPublicKey, NULL); if (pEvpPKeyCtx) { rcOssl = EVP_PKEY_verify_init(pEvpPKeyCtx); if (rcOssl > 0) { rcOssl = EVP_PKEY_CTX_set_signature_md(pEvpPKeyCtx, pEvpMdType); if (rcOssl > 0) { /* Get the digest from hDigest and verify it. 
*/ rcOssl = EVP_PKEY_verify(pEvpPKeyCtx, (uint8_t const *)pvSignedDigest, cbSignedDigest, RTCrDigestGetHash(hDigest), RTCrDigestGetHashSize(hDigest)); if (rcOssl > 0) rcOssl = VINF_SUCCESS; else rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_VERIFY_FINAL_FAILED, "EVP_PKEY_verify failed (%d)", rcOssl); } else rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_CTX_set_signature_md failed (%d)", rcOssl); } else rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_verify_init failed (%d)", rcOssl); EVP_PKEY_CTX_free(pEvpPKeyCtx); } else rcOssl = RTErrInfoSet(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_CTX_new failed"); } else rcOssl = RTErrInfoSet(pErrInfo, VERR_CR_PKIX_OSSL_D2I_PUBLIC_KEY_FAILED, "d2i_PublicKey failed"); } else # if OPENSSL_VERSION_NUMBER < 0x10001000 || defined(LIBRESSL_VERSION_NUMBER) rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_type() failed"); # else rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_base_id() failed"); } else rcOssl = RTErrInfoSetF(pErrInfo, VERR_CR_PKIX_OSSL_EVP_PKEY_TYPE_ERROR, "EVP_PKEY_set_type(%u) failed (sig algo %s)", idAlgoPkey, pszAlgoSn); # endif /* Cleanup and return.*/ EVP_PKEY_free(pEvpPublicKey); } else rcOssl = RTErrInfoSetF(pErrInfo, VERR_NO_MEMORY, "EVP_PKEY_new(%d) failed", iAlgoNid); /* * Check the result. */ if (RT_SUCCESS(rcIprt) && RT_SUCCESS(rcOssl)) return VINF_SUCCESS; if (RT_FAILURE_NP(rcIprt) && RT_FAILURE_NP(rcOssl)) return rcIprt; AssertMsgFailed(("rcIprt=%Rrc rcOssl=%Rrc\n", rcIprt, rcOssl)); if (RT_FAILURE_NP(rcOssl)) return rcOssl; #endif /* IPRT_WITH_OPENSSL */ return rcIprt; }
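Both verification entry points above expect the caller to have already decoded the signature algorithm, public key and signature from their ASN.1 containers. The wrapper below sketches the typical digest-then-verify flow for a detached payload using only IPRT calls that already appear in this file (RTCrDigestCreateByObjId, RTCrDigestUpdate, RTCrDigestRelease, RTCrPkixPubKeyVerifySignedDigest); the wrapper itself and its name are illustrative, not part of IPRT, and it assumes the same includes as the file above.

/* Illustrative helper, not IPRT source: hash a detached payload and verify the
 * signed digest against an already-decoded algorithm ID and public key. */
static int verifyDetachedSignature(PCRTASN1OBJID pAlgorithm, PCRTASN1BITSTRING pPublicKey,
                                   const void *pvSignedDigest, size_t cbSignedDigest,
                                   const void *pvData, size_t cbData, PRTERRINFO pErrInfo)
{
    RTCRDIGEST hDigest;
    int rc = RTCrDigestCreateByObjId(&hDigest, pAlgorithm);
    if (RT_SUCCESS(rc))
    {
        /* Hash the payload that the signature covers. */
        rc = RTCrDigestUpdate(hDigest, pvData, cbData);
        if (RT_SUCCESS(rc))
            rc = RTCrPkixPubKeyVerifySignedDigest(pAlgorithm, NULL /*pParameters*/, pPublicKey,
                                                  pvSignedDigest, cbSignedDigest, hDigest, pErrInfo);
        RTCrDigestRelease(hDigest);
    }
    return rc;
}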
9,243
2,561
<gh_stars>1000+ # coding: utf-8 # In[1]: #shooting star is my friend's fav indicator #the name is poetic and romantic #it is merely a vertical flipped hammer #hammer and shooting star could be confusing #since both of them can be inverted #i memorize them via a simple tune #if u see thor (with hammer),price shall soar #if u see star (shooting star),price shall fall #details of shooting star can be found in investopedia # https://www.investopedia.com/terms/s/shootingstar.asp import pandas as pd import matplotlib.pyplot as plt import numpy as np import yfinance # In[2]: #criteria of shooting star def shooting_star(data,lower_bound,body_size): df=data.copy() #open>close,red color df['condition1']=np.where(df['Open']>=df['Close'],1,0) #a candle with little or no lower wick df['condition2']=np.where( (df['Close']-df['Low'])<lower_bound*abs( df['Close']-df['Open']),1,0) #a candle with a small lower body df['condition3']=np.where(abs( df['Open']-df['Close'])<abs( np.mean(df['Open']-df['Close']))*body_size,1,0) #a long upper wick that is at least two times the size of the lower body df['condition4']=np.where( (df['High']-df['Open'])>=2*( df['Open']-df['Close']),1,0) #price uptrend df['condition5']=np.where( df['Close']>=df['Close'].shift(1),1,0) df['condition6']=np.where( df['Close'].shift(1)>=df['Close'].shift(2),1,0) #the next candle's high must stay #below the high of the shooting star df['condition7']=np.where( df['High'].shift(-1)<=df['High'],1,0) #the next candle's close below #the close of the shooting star df['condition8']=np.where( df['Close'].shift(-1)<=df['Close'],1,0) return df # In[3]: #signal generation #there are eight criteria according to investopedia def signal_generation(df,method, lower_bound=0.2,body_size=0.5, stop_threshold=0.05, holding_period=7): #get shooting star conditions data=method(df,lower_bound,body_size) #shooting star should suffice all conditions #in practise,you may find the definition too rigid #its important to relax a bit on the body size data['signals']=data['condition1']*data[ 'condition2']*data['condition3']*data[ 'condition4']*data['condition5']*data[ 'condition6']*data['condition7']*data[ 'condition8'] #shooting star is a short signal data['signals']=-data['signals'] #find exit position idxlist=data[data['signals']==-1].index for ind in idxlist: #entry point entry_pos=data['Close'].loc[ind] stop=False counter=0 while not stop: ind+=1 counter+=1 #set stop loss/profit at +-5% if abs(data['Close'].loc[ ind]/entry_pos-1)>stop_threshold: stop=True data['signals'].loc[ind]=1 #set maximum holding period at 7 workdays if counter>=holding_period: stop=True data['signals'].loc[ind]=1 #create positions data['positions']=data['signals'].cumsum() return data # In[4]: #since matplotlib remove the candlestick #plus we dont wanna install mpl_finance #we implement our own version #simply use fill_between to construct the bar #use line plot to construct high and low def candlestick(df,ax=None,highlight=None,titlename='', highcol='High',lowcol='Low', opencol='Open',closecol='Close',xcol='Date', colorup='r',colordown='g',highlightcolor='y', **kwargs): #bar width #use 0.6 by default dif=[(-3+i)/10 for i in range(7)] if not ax: ax=plt.figure(figsize=(10,5)).add_subplot(111) #construct the bars one by one for i in range(len(df)): #width is 0.6 by default #so 7 data points required for each bar x=[i+j for j in dif] y1=[df[opencol].iloc[i]]*7 y2=[df[closecol].iloc[i]]*7 barcolor=colorup if y1[0]>y2[0] else colordown #no high line plot if open/close is high if 
df[highcol].iloc[i]!=max(df[opencol].iloc[i],df[closecol].iloc[i]): #use generic plot to viz high and low #use 1.001 as a scaling factor #to prevent high line from crossing into the bar plt.plot([i,i], [df[highcol].iloc[i], max(df[opencol].iloc[i], df[closecol].iloc[i])*1.001],c='k',**kwargs) #same as high if df[lowcol].iloc[i]!=min(df[opencol].iloc[i],df[closecol].iloc[i]): plt.plot([i,i], [df[lowcol].iloc[i], min(df[opencol].iloc[i], df[closecol].iloc[i])*0.999],c='k',**kwargs) #treat the bar as fill between plt.fill_between(x,y1,y2, edgecolor='k', facecolor=barcolor,**kwargs) if highlight: if df[highlight].iloc[i]==-1: plt.fill_between(x,y1,y2, edgecolor='k', facecolor=highlightcolor,**kwargs) #only show 5 xticks plt.xticks([]) plt.grid(True) plt.title(titlename) # In[5]: #plotting the backtesting result def plot(data,name): #first plot is candlestick to showcase ax1=plt.subplot2grid((250,1),(0,0), rowspan=120, ylabel='Candlestick') candlestick(data,ax1, highlight='signals', highlightcolor='#FFFF00') #the second plot is the actual price #with long/short positions as up/down arrows ax2=plt.subplot2grid((250,1),(130,0), rowspan=120, ylabel='£ per share', xlabel='Date') ax2.plot(data.index, data['Close'], label=name) #long/short positions are attached to #the real close price of the stock #set the line width to zero #thats why we only observe markers ax2.plot(data.loc[data['signals']==-1].index, data['Close'].loc[data['signals']==-1], marker='v',lw=0,c='r',label='short', markersize=10) ax2.plot(data.loc[data['signals']==1].index, data['Close'].loc[data['signals']==1], marker='^',lw=0,c='g',label='long', markersize=10) #only show five tickers plt.xticks(range(0,len(data),len(data)//5), data['Date'][0::len(data)//5].dt.date) plt.grid(True) plt.legend(loc='lower left') plt.tight_layout(pad=0.1) plt.show() # In[6]: def main(): #initializing stdate='2000-01-01' eddate='2021-11-04' name='Vodafone' ticker='VOD.L' df=yfinance.download(ticker,start=stdate,end=eddate) df.reset_index(inplace=True) df['Date']=pd.to_datetime(df['Date']) #signal generation new=signal_generation(df,shooting_star) #get subset for better viz to highlight shooting star subset=new.loc[5268:5283].copy() subset.reset_index(inplace=True,drop=True) #viz plot(subset,name) # In[7]: if __name__ == '__main__': main()
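The script above stops at plotting the entries and exits; it never quantifies how the short signals would have fared. Below is a minimal sketch of how the signals/positions columns produced by signal_generation could be turned into a naive return series. The column names follow the code above, but the helper name naive_short_pnl and the return convention (close-to-close moves, shorts gain when price falls, no costs or slippage) are assumptions for illustration, not part of the original strategy.

# Hypothetical helper, not in the original script: converts the signals/positions
# columns from signal_generation() into a naive cumulative return series.
# Assumes close-to-close returns and ignores transaction costs and slippage.
def naive_short_pnl(data):
    out = data.copy()
    # daily close-to-close return of the underlying
    out['asset_return'] = out['Close'].pct_change().fillna(0)
    # positions is -1 while a short is open, so yesterday's position times
    # today's move gives the strategy's daily return
    out['strategy_return'] = out['positions'].shift(1).fillna(0) * out['asset_return']
    out['cumulative'] = (1 + out['strategy_return']).cumprod()
    return out[['Date', 'Close', 'signals', 'positions',
                'strategy_return', 'cumulative']]

# e.g. naive_short_pnl(signal_generation(df, shooting_star))['cumulative'].iloc[-1]
# would give the hypothetical end value of one unit of capital.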
3,710
376
import os POVRAY_BINARY = ("povray.exe" if os.name=='nt' else "povray") GLOBAL_SCENE_SETTINGS = { "charset" : "ascii", "adc_bailout" : "1/255", "ambient_light" : (1,1,1), "assumed_gamma" : 1.0, "irid_wavelength" : (0.25,0.18,0.14), "max_trace_level" : 5, "max_intersections" : 64, "mm_per_unit" : 10, "number_of_waves" : 10, "noise_generator" : 2, "Radiosity":{ "adc_bailout" : 0.01, "always_sample" : "off", "brightness" : 1.0, "count" : 35, "error_bound" : 1.8, "gray_threshold" : 0.0, "low_error_factor" : 0.5, "max_sample" : -1, "maximum_reuse" : 0.2, "minimum_reuse" : 0.015, "nearest_count" : 5, "normal" : "off" , "pretrace_start" : 0.08, "pretrace_end" : 0.04, "recursion_limit" : 2, "subsurface" : "off"}, "Subsurface":{ "radiosity" : "off", "samples" : (50,50)}, }
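The module above only declares the renderer binary and a settings dictionary; nothing shows how that dictionary could end up in a .pov scene file. A minimal serializer is sketched below: the global_settings { ... } block and the treatment of the nested 'Radiosity'/'Subsurface' entries as lowercase sub-blocks follow standard POV-Ray SDL, but the function settings_to_sdl is hypothetical and a few keywords (for example subsurface samples, which expects comma-separated scalars rather than a vector) may need adjusting before real use.

# Hypothetical helper, not part of the original module: render the settings
# dict as a POV-Ray global_settings block. Tuples become <x,y,z> vectors and
# nested dicts become lowercase sub-blocks such as radiosity { ... }.
def settings_to_sdl(settings, indent="  "):
    def fmt(value):
        if isinstance(value, tuple):
            return "<" + ",".join(str(v) for v in value) + ">"
        return str(value)

    lines = ["global_settings {"]
    for key, value in settings.items():
        if isinstance(value, dict):
            lines.append(indent + key.lower() + " {")
            for sub_key, sub_value in value.items():
                lines.append(indent * 2 + sub_key + " " + fmt(sub_value))
            lines.append(indent + "}")
        else:
            lines.append(indent + key + " " + fmt(value))
    lines.append("}")
    return "\n".join(lines)

# e.g. open("scene_settings.inc", "w").write(settings_to_sdl(GLOBAL_SCENE_SETTINGS))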
602
1,001
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # # http://www.apache.org/licenses/LICENSE-2.0 # # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from aliyunsdkcore.request import RpcRequest from aliyunsdkdas.endpoint import endpoint_data class CreateCloudbenchTasksRequest(RpcRequest): def __init__(self): RpcRequest.__init__(self, 'DAS', '2020-01-16', 'CreateCloudbenchTasks','das') self.set_method('POST') if hasattr(self, "endpoint_map"): setattr(self, "endpoint_map", endpoint_data.getEndpointMap()) if hasattr(self, "endpoint_regional"): setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional()) def get_ClientType(self): return self.get_query_params().get('ClientType') def set_ClientType(self,ClientType): self.add_query_param('ClientType',ClientType) def get_DstPort(self): return self.get_query_params().get('DstPort') def set_DstPort(self,DstPort): self.add_query_param('DstPort',DstPort) def get_Description(self): return self.get_query_params().get('Description') def set_Description(self,Description): self.add_query_param('Description',Description) def get_RequestStartTime(self): return self.get_query_params().get('RequestStartTime') def set_RequestStartTime(self,RequestStartTime): self.add_query_param('RequestStartTime',RequestStartTime) def get_DstConnectionString(self): return self.get_query_params().get('DstConnectionString') def set_DstConnectionString(self,DstConnectionString): self.add_query_param('DstConnectionString',DstConnectionString) def get_DstSuperPassword(self): return self.get_query_params().get('DstSuperPassword') def set_DstSuperPassword(self,DstSuperPassword): self.add_query_param('DstSuperPassword',DstSuperPassword) def get_DstSuperAccount(self): return self.get_query_params().get('DstSuperAccount') def set_DstSuperAccount(self,DstSuperAccount): self.add_query_param('DstSuperAccount',DstSuperAccount) def get_DstInstanceId(self): return self.get_query_params().get('DstInstanceId') def set_DstInstanceId(self,DstInstanceId): self.add_query_param('DstInstanceId',DstInstanceId) def get_Rate(self): return self.get_query_params().get('Rate') def set_Rate(self,Rate): self.add_query_param('Rate',Rate) def get_RequestDuration(self): return self.get_query_params().get('RequestDuration') def set_RequestDuration(self,RequestDuration): self.add_query_param('RequestDuration',RequestDuration) def get_DtsJobId(self): return self.get_query_params().get('DtsJobId') def set_DtsJobId(self,DtsJobId): self.add_query_param('DtsJobId',DtsJobId) def get_RequestEndTime(self): return self.get_query_params().get('RequestEndTime') def set_RequestEndTime(self,RequestEndTime): self.add_query_param('RequestEndTime',RequestEndTime) def get_Amount(self): return self.get_query_params().get('Amount') def set_Amount(self,Amount): self.add_query_param('Amount',Amount) def get_TaskType(self): return self.get_query_params().get('TaskType') def set_TaskType(self,TaskType): 
self.add_query_param('TaskType',TaskType) def get_EndState(self): return self.get_query_params().get('EndState') def set_EndState(self,EndState): self.add_query_param('EndState',EndState) def get_BackupId(self): return self.get_query_params().get('BackupId') def set_BackupId(self,BackupId): self.add_query_param('BackupId',BackupId) def get_SrcSuperPassword(self): return self.get_query_params().get('SrcSuperPassword') def set_SrcSuperPassword(self,SrcSuperPassword): self.add_query_param('SrcSuperPassword',SrcSuperPassword) def get_BackupTime(self): return self.get_query_params().get('BackupTime') def set_BackupTime(self,BackupTime): self.add_query_param('BackupTime',BackupTime) def get_GatewayVpcIp(self): return self.get_query_params().get('GatewayVpcIp') def set_GatewayVpcIp(self,GatewayVpcIp): self.add_query_param('GatewayVpcIp',GatewayVpcIp) def get_WorkDir(self): return self.get_query_params().get('WorkDir') def set_WorkDir(self,WorkDir): self.add_query_param('WorkDir',WorkDir) def get_DtsJobClass(self): return self.get_query_params().get('DtsJobClass') def set_DtsJobClass(self,DtsJobClass): self.add_query_param('DtsJobClass',DtsJobClass) def get_SrcPublicIp(self): return self.get_query_params().get('SrcPublicIp') def set_SrcPublicIp(self,SrcPublicIp): self.add_query_param('SrcPublicIp',SrcPublicIp) def get_SrcInstanceId(self): return self.get_query_params().get('SrcInstanceId') def set_SrcInstanceId(self,SrcInstanceId): self.add_query_param('SrcInstanceId',SrcInstanceId) def get_DstType(self): return self.get_query_params().get('DstType') def set_DstType(self,DstType): self.add_query_param('DstType',DstType) def get_SrcSuperAccount(self): return self.get_query_params().get('SrcSuperAccount') def set_SrcSuperAccount(self,SrcSuperAccount): self.add_query_param('SrcSuperAccount',SrcSuperAccount) def get_GatewayVpcId(self): return self.get_query_params().get('GatewayVpcId') def set_GatewayVpcId(self,GatewayVpcId): self.add_query_param('GatewayVpcId',GatewayVpcId) def get_SmartPressureTime(self): return self.get_query_params().get('SmartPressureTime') def set_SmartPressureTime(self,SmartPressureTime): self.add_query_param('SmartPressureTime',SmartPressureTime)
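The request class above only assembles query parameters; sending it requires an authenticated client from aliyunsdkcore. A typical call sequence is sketched below: AcsClient and do_action_with_exception are the standard aliyunsdkcore entry points, while the credentials, region, and parameter values are placeholders rather than values taken from this file.

# Illustrative usage only; credentials, region and parameter values are placeholders.
from aliyunsdkcore.client import AcsClient

client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')

request = CreateCloudbenchTasksRequest()
request.set_DstInstanceId('rm-example-instance')  # placeholder target instance
request.set_Amount('1')                           # placeholder task amount
request.set_TaskType('pressure-test')             # placeholder task type

# Raises a ClientException/ServerException on failure, otherwise returns
# the raw response body.
response = client.do_action_with_exception(request)
print(response)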
2,332
14,668
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ui/browser_navigator.h" #include "chrome/test/base/testing_profile.h" #include "content/public/test/browser_task_environment.h" #include "testing/gtest/include/gtest/gtest.h" #include "url/gurl.h" class BrowserNavigatorUnitTest : public testing::Test { public: BrowserNavigatorUnitTest() = default; BrowserNavigatorUnitTest(const BrowserNavigatorUnitTest&) = delete; BrowserNavigatorUnitTest& operator=(const BrowserNavigatorUnitTest&) = delete; ~BrowserNavigatorUnitTest() override = default; private: content::BrowserTaskEnvironment task_environment_; }; // Ensure empty view source is allowed in Incognito. TEST_F(BrowserNavigatorUnitTest, EmptyViewSourceIncognito) { TestingProfile profile; EXPECT_TRUE(IsURLAllowedInIncognito(GURL("view-source:"), &profile)); }
292
335
{ "word": "Alliance", "definitions": [ "A union or association formed for mutual benefit, especially between countries or organizations.", "A relationship based on similarity of interests, nature, or qualities.", "The state of being joined or associated.", "A group of closely related plant associations." ], "parts-of-speech": "Noun" }
123
1,007
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from .mot import MOTAccumulator # NoQA import motmetrics.metrics import motmetrics.distances import motmetrics.io import motmetrics.utils # NoQA # Needs to be last line __version__ = '1.0.2'
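The package __init__ above re-exports MOTAccumulator plus the metrics, distances, io and utils submodules, but gives no hint of how they fit together. A minimal sketch of the usual accumulate-then-summarize flow is shown below; the frame data is invented for illustration, while MOTAccumulator.update, distances.norm2squared_matrix and metrics.create/compute are the documented py-motmetrics entry points.

# Minimal illustrative flow; the object/hypothesis ids and positions are made up.
import numpy as np
import motmetrics as mm

acc = mm.MOTAccumulator(auto_id=True)  # frame ids are assigned automatically

# one frame: two ground-truth objects against two hypotheses
gt_ids = ['a', 'b']
hyp_ids = [1, 2]
gt_pos = np.array([[0.0, 0.0], [1.0, 1.0]])
hyp_pos = np.array([[0.1, 0.0], [2.0, 2.0]])

# squared Euclidean distances; entries beyond max_d2 become NaN (no match allowed)
dists = mm.distances.norm2squared_matrix(gt_pos, hyp_pos, max_d2=1.0)
acc.update(gt_ids, hyp_ids, dists)

mh = mm.metrics.create()
summary = mh.compute(acc, metrics=['num_frames', 'mota', 'motp'], name='demo')
print(summary)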
106
468
#define GLI_INCLUDE_GL_OES_COMPRESSED_PALETTED_TEXTURE enum Main { GL_PALETTE4_RGB8_OES = 0x8B90, GL_PALETTE4_RGBA8_OES = 0x8B91, GL_PALETTE4_R5_G6_B5_OES = 0x8B92, GL_PALETTE4_RGBA4_OES = 0x8B93, GL_PALETTE4_RGB5_A1_OES = 0x8B94, GL_PALETTE8_RGB8_OES = 0x8B95, GL_PALETTE8_RGBA8_OES = 0x8B96, GL_PALETTE8_R5_G6_B5_OES = 0x8B97, GL_PALETTE8_RGBA4_OES = 0x8B98, GL_PALETTE8_RGB5_A1_OES = 0x8B99, };
358