max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
428
<filename>Java/Loon-Lite(PureJava)/Loon-Lite-Core/src/loon/action/ActionTween.java<gh_stars>100-1000 /** * Copyright 2008 - 2015 The Loon Game Engine Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * * @project loon * @author cping * @email:<EMAIL> * @version 0.5 */ package loon.action; import loon.LSysException; import loon.LSystem; import loon.action.map.Field2D; import loon.action.sprite.ISprite; import loon.action.sprite.effect.BaseEffect; import loon.canvas.LColor; import loon.event.ActionUpdate; import loon.event.FrameLoopEvent; import loon.event.Updateable; import loon.geom.Bezier; import loon.geom.BooleanValue; import loon.geom.Vector2f; import loon.utils.Array; import loon.utils.Easing; import loon.utils.TArray; import loon.utils.Easing.EasingMode; import loon.utils.StringKeyValue; public class ActionTween extends ActionTweenBase<ActionTween> { private static int combinedAttrsLimit = 3; private static int funPointsLimit = 0; public static void setCombinedAttributesLimit(int limit) { ActionTween.combinedAttrsLimit = limit; } public static void setfunPointsLimit(int limit) { ActionTween.funPointsLimit = limit; } private static final ActionTweenPool.Callback<ActionTween> poolCallback = new ActionTweenPool.Callback<ActionTween>() { @Override public void onPool(ActionTween obj) { obj.reset(); } @Override public void onUnPool(ActionTween obj) { try { obj.reset(); onSuccess(obj); } catch (Throwable ex) { LSystem.error("Action Tween exception", ex); onFailure(ex); } } 
@Override public void onSuccess(ActionTween result) { } @Override public void onFailure(Throwable cause) { } }; private static final ActionTweenPool<ActionTween> pool = new ActionTweenPool<ActionTween>(20, poolCallback) { @Override protected ActionTween create() { return new ActionTween(); } }; /** * 从当前ActionBind数值到指定目标(大多数时候,调用此状态已经足够) * * @param target * 具体的操作对象 * @param tweenType * 需要转变的接口 * @param duration * 持续时间 * @return */ public static ActionTween to(ActionBind target, int tweenType, float duration) { ActionTween tween = pool.get(); tween.setup(target, tweenType, duration); tween.ease(Easing.QUAD_INOUT); tween.path(ActionControl.SMOOTH); return tween; } /** * 从注入的数值演变到当前值 * * @param target * @param tweenType * @param duration * @return */ public static ActionTween from(ActionBind target, int tweenType, float duration) { ActionTween tween = pool.get(); tween.setup(target, tweenType, duration); tween.ease(Easing.QUAD_INOUT); tween.path(ActionControl.SMOOTH); tween.isFrom = true; return tween; } /** * 直接注入当前对象为指定数值 * * @param target * @param tweenType * @return */ public static ActionTween set(ActionBind target, int tweenType) { ActionTween tween = pool.get(); tween.setup(target, tweenType, 0); tween.ease(Easing.QUAD_INOUT); return tween; } /** * 直接调用一个ActionCallback方法 * * @param callback * @return */ public static ActionTween call(ActionCallback callback) { ActionTween tween = pool.get(); tween.setup(null, -1, 0); tween.setCallback(callback); tween.setCallbackTriggers(ActionMode.START); return tween; } /** * 制作一个无状态的空ActionTween对象 * * @return */ public static ActionTween mark() { ActionTween tween = pool.get(); tween.setup(null, -1, 0); return tween; } public static int getPoolSize() { return pool.size(); } public static void resize(int minCapacity) { pool.resize(minCapacity); } private int type; private Easing equation; private ActionPath path; private boolean isFrom; private boolean isRelative; private boolean isRepeat; private int _combinedAttrsSize; 
private int _funPointsSize; private final float[] startValues = new float[combinedAttrsLimit]; private final float[] targetValues = new float[combinedAttrsLimit]; private final float[] funPoints = new float[funPointsLimit * combinedAttrsLimit]; private float[] accessorBuffer = new float[combinedAttrsLimit]; private float[] pathBuffer = new float[(2 + funPointsLimit) * combinedAttrsLimit]; private Array<ActionEvent> actionEvents; private ActionTween() { reset(); } public ActionTween select(boolean selected, ActionEvent a, ActionEvent b) { return event(selected ? a : b); } public ActionTween flashTo() { return event(new FlashTo()); } public ActionTween flashTo(float duration) { return event(new FlashTo(duration)); } public ActionTween flashTo(float duration, EasingMode easing) { return event(new FlashTo(duration, easing)); } public ActionTween flashTo(float duration, float delay, EasingMode easing) { return event(new FlashTo(duration, delay, easing)); } public ActionTween moveTo(float endX, float endY) { return moveTo(endX, endY, false, 8); } public ActionTween moveTo(float endX, float endY, ActionListener l) { return moveTo(endX, endY, false, 8, l); } public ActionTween moveTo(float endX, float endY, int speed) { return moveTo(endX, endY, false, speed); } public ActionTween moveTo(float endX, float endY, int speed, ActionListener l) { return moveTo(endX, endY, false, speed, l); } public ActionTween moveTo(float endX, float endY, boolean flag) { return moveTo(LSystem.viewSize.newField2D(), endX, endY, flag, 8, 0, 0, null); } public ActionTween moveTo(float endX, float endY, boolean flag, ActionListener l) { return moveTo(LSystem.viewSize.newField2D(), endX, endY, flag, 8, 0, 0, l); } public ActionTween moveTo(float endX, float endY, boolean flag, int speed) { return moveTo(LSystem.viewSize.newField2D(), endX, endY, flag, speed, 0, 0, null); } public ActionTween moveTo(float endX, float endY, boolean flag, int speed, ActionListener l) { return 
moveTo(LSystem.viewSize.newField2D(), endX, endY, flag, speed, 0, 0, l); } public ActionTween moveTo(float endX, float endY, boolean flag, float offsetX, float offsetY) { return moveTo(LSystem.viewSize.newField2D(), endX, endY, flag, 8, offsetX, offsetY, null); } public ActionTween moveTo(float endX, float endY, boolean flag, float offsetX, float offsetY, ActionListener l) { return moveTo(LSystem.viewSize.newField2D(), endX, endY, flag, 8, offsetX, offsetY, l); } public ActionTween moveTo(float endX, float endY, boolean flag, int speed, float offsetX, float offsetY) { return moveTo(LSystem.viewSize.newField2D(), endX, endY, flag, speed, offsetX, offsetY, null); } public ActionTween moveTo(float endX, float endY, boolean flag, int speed, float offsetX, float offsetY, ActionListener l) { return moveTo(LSystem.viewSize.newField2D(), endX, endY, flag, speed, offsetX, offsetY, l); } public ActionTween moveTo(Field2D map, float endX, float endY, boolean flag, int speed) { return moveTo(map, endX, endY, flag, speed, 0, 0, null); } public ActionTween moveTo(Field2D map, float endX, float endY, boolean flag, int speed, ActionListener l) { return moveTo(map, endX, endY, flag, speed, 0, 0, l); } public ActionTween moveTo(Field2D map, float endX, float endY, boolean flag, int speed, float offsetX, float offsetY, ActionListener l) { if (map != null && map.inside(endX, endY)) { MoveTo move = new MoveTo(map, endX, endY, flag, speed); move.setDelay(0); move.setOffset(offsetX, offsetY); return event(move, l); } else { return moveBy(endX, endY, speed, EasingMode.Linear, offsetX, offsetY, l); } } public ActionTween followTo(ActionBind actorToFollow) { return event(new FollowTo(actorToFollow)); } public ActionTween followTo(Field2D field2d, ActionBind actorToFollow) { return event(new FollowTo(field2d, actorToFollow)); } public ActionTween followTo(ActionBind actorToFollow, float speed) { return event(new FollowTo(actorToFollow, speed)); } public ActionTween followTo(Field2D field2d, 
ActionBind actorToFollow, float speed) { return event(new FollowTo(field2d, actorToFollow, speed)); } public ActionTween followTo(ActionBind actorToFollow, float follow, float speed) { return event(new FollowTo(actorToFollow, follow, speed)); } public ActionTween followTo(Field2D field2d, ActionBind actorToFollow, float follow, float speed) { return event(new FollowTo(field2d, actorToFollow, follow, speed)); } public ActionTween followTo(ActionBind actorToFollow, float vx, float vy, float follow, float speed) { return event(new FollowTo(actorToFollow, vx, vy, follow, speed)); } public ActionTween followTo(Field2D field2d, ActionBind actorToFollow, float vx, float vy, float follow, float speed) { return event(new FollowTo(field2d, actorToFollow, vx, vy, follow, speed)); } public ActionTween moveBy(float endX, float endY) { return moveBy(endX, endY, 8); } public ActionTween moveBy(float endX, float endY, ActionListener l) { return moveBy(endX, endY, 8, l); } public ActionTween moveBy(float endX, float endY, int speed) { return event(new MoveBy(endX, endY, speed), null); } public ActionTween moveBy(float endX, float endY, int speed, ActionListener l) { return event(new MoveBy(endX, endY, speed), l); } public ActionTween moveBy(float endX, float endY, float duration, float delay, EasingMode easing, float offsetX, float offsetY) { return event(new MoveBy(-1f, -1f, endX, endY, 0, duration, delay, easing, offsetX, offsetY)); } public ActionTween moveBy(float endX, float endY, float duration, float delay, EasingMode easing, float offsetX, float offsetY, ActionListener l) { return event(new MoveBy(-1f, -1f, endX, endY, 0, duration, delay, easing, offsetX, offsetY), l); } public ActionTween moveBy(float endX, float endY, int speed, EasingMode easing, float offsetX, float offsetY) { return moveBy(endX, endY, speed, easing, offsetX, offsetY, null); } public ActionTween moveBy(float endX, float endY, int speed, EasingMode easing, float offsetX, float offsetY, ActionListener l) 
{ return event(new MoveBy(endX, endY, speed, easing, offsetX, offsetY), l); } public ActionTween moveBy(float startX, float startY, float endX, float endY, float duration, float delay, EasingMode easing, float offsetX, float offsetY) { return event(new MoveBy(startX, startY, endX, endY, 0, duration, delay, easing, offsetX, offsetY), null); } public ActionTween moveBy(float startX, float startY, float endX, float endY, float duration, float delay, EasingMode easing, float offsetX, float offsetY, ActionListener l) { return event(new MoveBy(startX, startY, endX, endY, 0, duration, delay, easing, offsetX, offsetY), l); } public ActionTween moveBy(float endX, float endY, EasingMode easing) { return event(new MoveBy(endX, endY, easing)); } public ActionTween moveBy(float endX, float endY, EasingMode easing, ActionListener l) { return event(new MoveBy(endX, endY, easing), l); } public ActionTween moveBy(float endX, float endY, float duration, float delay, EasingMode easing) { return event(new MoveBy(endX, endY, duration, delay, easing)); } public ActionTween moveBy(float endX, float endY, float duration, float delay, EasingMode easing, ActionListener l) { return event(new MoveBy(endX, endY, duration, delay, easing), l); } public ActionTween moveBy(float endX, float endY, float duration, EasingMode easing) { return event(new MoveBy(endX, endY, duration, easing)); } public ActionTween moveBy(float endX, float endY, float duration, EasingMode easing, ActionListener l) { return event(new MoveBy(endX, endY, duration, easing), l); } public ActionTween fadeIn(float speed) { return fadeTo(ISprite.TYPE_FADE_IN, speed); } public ActionTween fadeOut(float speed) { return fadeTo(ISprite.TYPE_FADE_OUT, speed); } public ActionTween fadeTo(int fadeMode, float speed) { return event(new FadeTo(fadeMode, (int) speed)); } public ActionTween rotateTo(float angle) { return rotateTo(angle, 6f); } public ActionTween rotateTo(float angle, float speed) { return rotateTo(angle, 1f, speed); } 
public ActionTween rotateTo(float angle, float diff, float speed) { return event(new RotateTo(angle, diff, speed)); } public ActionTween rotateTo(float angle, float diff, float speed, EasingMode easing) { return event(new RotateTo(angle, diff, speed, easing)); } public ActionTween rotateTo(float startRotation, float dstAngle, float diffAngle, float duration, float delay, EasingMode easing) { return event(new RotateTo(startRotation, dstAngle, diffAngle, duration, delay, easing)); } public ActionTween colorTo(LColor end) { return colorTo(null, end); } public ActionTween colorTo(LColor start, LColor end) { ColorTo color = new ColorTo(start, end, 1f); color.setDelay(0); return event(color); } public ActionTween colorTo(LColor start, LColor end, float delay) { return colorTo(start, end, delay, 1f); } public ActionTween colorTo(LColor start, LColor end, float duration, float delay) { ColorTo color = new ColorTo(start, end, duration, delay); color.setDelay(0); return event(color); } public ActionTween eventTo(FrameLoopEvent e) { EventTo event = new EventTo(e); event.setDelay(0); return event(event); } public ActionTween transferTo(float startPos, float endPos, EasingMode mode, boolean controlX, boolean controlY) { TransferTo transfer = new TransferTo(startPos, endPos, 1f, mode, controlX, controlY); transfer.setDelay(0); return event(transfer); } public ActionTween transferTo(float startPos, float endPos, float duration, EasingMode mode) { TransferTo transfer = new TransferTo(startPos, endPos, duration, mode); transfer.setDelay(0); return event(transfer); } public ActionTween transferTo(float startPos, float endPos, float duration, EasingMode mode, boolean controlX, boolean controlY) { TransferTo transfer = new TransferTo(startPos, endPos, duration, mode, controlX, controlY); transfer.setDelay(0); return event(transfer); } public ActionTween transferTo(float startPos, float endPos, float duration, float delay, EasingMode mode, boolean controlX, boolean controlY) { 
TransferTo transfer = new TransferTo(startPos, endPos, delay, duration, mode, controlX, controlY); transfer.setDelay(0); return event(transfer); } public ActionTween shakeTo(float shake) { return shakeTo(shake, shake); } public ActionTween shakeTo(float shakeX, float shakeY) { ShakeTo shake = new ShakeTo(shakeX, shakeY); shake.setDelay(0); return event(shake); } public ActionTween shakeTo(float shakeX, float shakeY, float duration) { ShakeTo shake = new ShakeTo(shakeX, shakeY, duration); shake.setDelay(0); return event(shake); } public ActionTween shakeTo(float shakeX, float shakeY, float duration, float delay) { ShakeTo shake = new ShakeTo(shakeX, shakeY, duration, delay); shake.setDelay(0); return event(shake); } public ActionTween shakeTo(float shakeX, float shakeY, float duration, float delay, EasingMode easing) { ShakeTo shake = new ShakeTo(shakeX, shakeY, duration, delay, easing); shake.setDelay(0); return event(shake); } public ActionTween scaleTo(float s) { return scaleTo(s, s, 0.1f); } public ActionTween scaleTo(float sx, float sy) { return scaleTo(sx, sy, 0.1f); } public ActionTween scaleTo(float sx, float sy, float speed) { ScaleTo scale = new ScaleTo(sx, sy); scale.setDelay(0); scale.setSpeed(speed); return event(scale); } public ActionTween showTo(boolean v) { ShowTo show = new ShowTo(v); show.setDelay(0); return event(show); } public ActionTween arrowTo(float x, float y) { return event(new ArrowTo(x, y)); } public ActionTween arrowTo(float x, float y, float g) { return event(new ArrowTo(x, y, g)); } public ActionTween arrowTo(float x, float y, float speed, float g, EasingMode easing) { return event(new ArrowTo(x, y, speed, g, easing)); } public ActionTween arrowTo(float tx, float ty, float speed, float g) { return event(new ArrowTo(tx, ty, speed, g)); } public ActionTween arrowTo(float st, float sy, float tx, float ty, float speed, float g, float duration, float delay, EasingMode easing) { return event(new ArrowTo(st, sy, tx, ty, speed, g, duration, 
delay, easing)); } public ActionTween circleTo(int radius, int velocity) { return event(new CircleTo(radius, velocity)); } public ActionTween circleTo(float centerX, float cenertY, int radius, int velocity) { return circleTo(-1, -1, radius, velocity, 0.1f); } public ActionTween circleTo(float centerX, float cenertY, int radius, int velocity, float speed) { return event(new CircleTo(centerX, cenertY, radius, velocity, speed)); } public ActionTween effectTo(BaseEffect eff) { return event(new EffectTo(eff)); } public ActionTween fireTo(float endX, float endY, float speed) { return event(new FireTo(endX, endY, speed)); } public ActionTween jumpTo(int moveJump, float gravity) { return event(new JumpTo(moveJump, gravity)); } /** * <p> * 需要[并行]的缓动动画事件在此注入 * </p> * * 如果要[并行](也就是旋转,变色什么的一起来)进行缓动动画,而非分别进行,请把要演示的ActionEvent注入此类,此类用于同时运行多个ActionEvent * * @param eves * @return */ public ActionTween parallelTo(ActionEvent... eves) { return event(new ParallelTo(eves)); } /** * <p> * 需要[并行]的缓动动画事件在此注入 * </p> * * 如果要[并行](也就是旋转,变色什么的一起来)进行缓动动画,而非分别进行,请把要演示的ActionEvent注入此类,此类用于同时运行多个ActionEvent * * @param list * @return */ public ActionTween parallelTo(TArray<ActionEvent> list) { return event(new ParallelTo(list)); } public ActionTween updateTo(Updateable u) { return event(new UpdateTo(u)); } public ActionTween waitTo(ActionUpdate au) { return event(new WaitTo(au)); } public ActionTween transformPos(float x, float y) { return event(TransformTo.pos(x, y)); } public ActionTween transformScale(float scaleX, float scaleY) { return event(TransformTo.scale(scaleX, scaleY)); } public ActionTween transformAlpha(float newAlpha) { return event(TransformTo.alpha(newAlpha)); } public ActionTween transformRotation(float newRotation) { return event(TransformTo.rotation(newRotation)); } public ActionTween transformColor(LColor newColor) { return event(TransformTo.color(newColor)); } public ActionTween waitTo(BooleanValue bv) { return event(new WaitTo(bv)); } public ActionTween moveRoundTo(float 
angle, float radius, Vector2f centerPoint, EasingMode easing) { return event(new MoveRoundTo(angle, radius, centerPoint, easing)); } public ActionTween moveRoundTo(float angle, float radius, Vector2f centerPoint, float duration, EasingMode easing) { return event(new MoveRoundTo(angle, radius, centerPoint, duration, easing)); } public ActionTween moveRoundTo(float startAngle, float angle, float startRadius, float radius, Vector2f centerPoint, Vector2f startPoint, float duration, float delay, EasingMode easing) { return event(new MoveRoundTo(startAngle, angle, startRadius, radius, centerPoint, startPoint, duration, delay, easing)); } public ActionTween moveOvalTo(float angle, float width, float height, Vector2f centerPoint, float duration, EasingMode easing) { return event(new MoveOvalTo(0, angle, width, height, centerPoint, duration, easing)); } public ActionTween moveOvalTo(float startAngle, float angle, float width, float height, Vector2f centerPoint, float duration, EasingMode easing) { return event(new MoveOvalTo(startAngle, angle, width, height, centerPoint, duration, easing)); } public ActionTween moveOvalTo(float startAngle, float angle, float width, float height, Vector2f centerPoint, Vector2f startPoint, float duration, float delay, EasingMode easing) { return event( new MoveOvalTo(startAngle, angle, width, height, centerPoint, startPoint, duration, delay, easing)); } public ActionTween bezierBy(float duration, Bezier b) { return event(new BezierBy(duration, b)); } public ActionTween bezierBy(float sx, float sy, float duration, Bezier b) { return event(new BezierBy(sx, sy, duration, b)); } public ActionTween bezierBy(float sx, float sy, float duration, EasingMode mode, Bezier b) { return event(new BezierBy(sx, sy, duration, mode, b)); } public ActionTween bezierTo(float duration, Bezier b) { return event(new BezierTo(duration, b)); } public ActionTween bezierTo(float sx, float sy, float duration, Bezier b) { return event(new BezierTo(sx, sy, duration, b)); 
} public ActionTween bezierTo(float sx, float sy, float duration, EasingMode mode, Bezier b) { return event(new BezierTo(sx, sy, duration, mode, b)); } public ActionTween flipX(boolean x) { return event(new FlipXTo(x)); } public ActionTween flipY(boolean y) { return event(new FlipYTo(y)); } public ActionTween removeActionsTo(ActionBind bind) { return event(new RemoveActionsTo(bind)); } public ActionTween removeActionsTo() { return removeActionsTo(null); } /** * 监听所有指定名称的已注入事件 * * @param name * @param listener * @return */ public ActionTween listenTags(Object tag, ActionListener listener) { if (actionEvents == null || tag == null) { return this; } for (; actionEvents.hashNext();) { ActionEvent tmp = actionEvents.next(); if (tmp != null) { if (tag.equals(tmp.tag) || tmp.tag == tag) { tmp.setActionListener(listener); } } } actionEvents.stopNext(); return this; } /** * 监听所有指定名称的已注入事件 * * @param name * @param listener * @return */ public ActionTween listenNames(String name, ActionListener listener) { if (actionEvents == null || name == null) { return this; } String findName = name.trim().toLowerCase(); for (; actionEvents.hashNext();) { ActionEvent tmp = actionEvents.next(); if (tmp != null) { if (findName.equals(tmp.getName())) { tmp.setActionListener(listener); } } } actionEvents.stopNext(); return this; } /** * 停止所有指定名的动画 * * @param name * @return */ public ActionTween killNames(String name) { if (actionEvents == null || name == null) { return this; } String findName = name.trim().toLowerCase(); for (; actionEvents.hashNext();) { ActionEvent tmp = actionEvents.next(); if (tmp != null) { if (findName.equals(tmp.getName())) { tmp.kill(); } } } actionEvents.stopNext(); return this; } /** * 停止所有包含指定标记的动画 * * @param tag * @return */ public ActionTween killTags(Object tag) { if (actionEvents == null || tag == null) { return this; } for (; actionEvents.hashNext();) { ActionEvent tmp = actionEvents.next(); if (tmp != null) { if (tag.equals(tmp.tag) || tmp.tag == tag) { 
tmp.kill(); } } } actionEvents.stopNext(); return this; } public ActionTween loop(int count) { return loop(count, false); } public ActionTween loop(int count, boolean reverse) { if (actionEvents == null) { return this; } if (count < 1) { return this; } if (count == 1) { count++; } ActionEvent e = null; Array<ActionEvent> tmps = new Array<ActionEvent>(); for (int i = 0; i < count - 1; i++) { for (; actionEvents.hashNext();) { ActionEvent tmp = actionEvents.next(); if (tmp != null) { e = tmp; } tmps.add(reverse ? e.reverse() : e.cpy()); } actionEvents.stopNext(); } actionEvents.addAll(tmps); return this; } public ActionTween loopLast(int count) { return loopLast(count, false); } public ActionTween loopLast(int count, boolean reverse) { if (actionEvents == null) { return this; } if (count < 1) { return this; } if (count == 1) { count++; } Array<ActionEvent> tmps = new Array<ActionEvent>(); for (int i = 0; i < count - 1; i++) { tmps.add(reverse ? actionEvents.last().reverse() : actionEvents.last().cpy()); } actionEvents.addAll(tmps); return this; } public TArray<ActionEvent> getActionEvents() { if (actionEvents == null) { return new TArray<ActionEvent>(0); } return new TArray<ActionEvent>(actionEvents); } @Override public ActionTween delay(float d) { super.delay(delay); if (actionEvents != null && d > 0) { DelayTo delay = new DelayTo(d); delay.setDelay(0); return event(delay); } else { return this; } } public ActionTween repeat(float time) { return repeat(1, time); } @Override public ActionTween repeat(int count, float time) { super.repeat(count, time); if (actionEvents == null) { return this; } isRepeat = true; boolean update = count > 1; ReplayTo replay = new ReplayTo(null, update); if (update) { replay.count = count; } event(replay); return delay(time); } @Override public ActionTween repeatBackward(int count, float time) { super.repeatBackward(count, time); if (actionEvents == null) { return this; } isRepeat = true; boolean update = count > 1; ReplayTo replay = new 
ReplayTo(null, update); if (update) { replay.count = count; } event(replay); return delay(time); } /** * 自定义事件(连续动画)请在此处注入 * * @param event * @return */ public ActionTween event(ActionEvent event) { return event(event, null); } /** * 注入缓动动画(连续动画)事件(自定义事件也请在此处注入) * * @param event * @param listener * @return */ public ActionTween event(ActionEvent event, ActionListener listener) { if (actionEvents == null) { actionEvents = new Array<ActionEvent>(); } if (event != null) { actionEvents.add(event); if (listener != null) { event.setActionListener(listener); } } return this; } public ActionEvent getCurrentActionEvent() { return this.currentActionEvent; } @Override protected void reset() { super.reset(); _target = null; actionEvents = null; currentActionEvent = null; type = -1; equation = null; path = null; isFrom = isRelative = false; _combinedAttrsSize = _funPointsSize = 0; if (accessorBuffer.length != combinedAttrsLimit) { accessorBuffer = new float[combinedAttrsLimit]; } if (pathBuffer.length != (2 + funPointsLimit) * combinedAttrsLimit) { pathBuffer = new float[(2 + funPointsLimit) * combinedAttrsLimit]; } } private void setup(ActionBind target, int tweenType, float duration) { if (duration < 0) { throw new LSysException("Duration can't be negative ."); } this._target = target; this.type = tweenType; this.duration = duration; } public ActionTween delayGlobal(float delay) { ActionControl.setDelay((long) (delay * 1000f)); return this; } public ActionTween ease(Easing ease) { this.equation = ease; return this; } public ActionTween target(float targetValue) { targetValues[0] = targetValue; return this; } public ActionTween target(float targetValue1, float targetValue2) { targetValues[0] = targetValue1; targetValues[1] = targetValue2; return this; } public ActionTween target(float targetValue1, float targetValue2, float targetValue3) { targetValues[0] = targetValue1; targetValues[1] = targetValue2; targetValues[2] = targetValue3; return this; } public ActionTween 
target(float... targetValues) { if (targetValues.length > combinedAttrsLimit) { return this; } System.arraycopy(targetValues, 0, this.targetValues, 0, targetValues.length); return this; } public ActionTween targetRelative(float targetValue) { isRelative = true; targetValues[0] = isInitialized() ? targetValue + startValues[0] : targetValue; return this; } public ActionTween targetRelative(float targetValue1, float targetValue2) { isRelative = true; targetValues[0] = isInitialized() ? targetValue1 + startValues[0] : targetValue1; targetValues[1] = isInitialized() ? targetValue2 + startValues[1] : targetValue2; return this; } public ActionTween targetRelative(float targetValue1, float targetValue2, float targetValue3) { isRelative = true; targetValues[0] = isInitialized() ? targetValue1 + startValues[0] : targetValue1; targetValues[1] = isInitialized() ? targetValue2 + startValues[1] : targetValue2; targetValues[2] = isInitialized() ? targetValue3 + startValues[2] : targetValue3; return this; } public ActionTween targetRelative(float... targetValues) { if (targetValues.length > combinedAttrsLimit) { return this; } for (int i = 0; i < targetValues.length; i++) { this.targetValues[i] = isInitialized() ? 
targetValues[i] + startValues[i] : targetValues[i]; } isRelative = true; return this; } public ActionTween funPoint(float targetValue) { if (_funPointsSize == funPointsLimit) { return this; } funPoints[_funPointsSize] = targetValue; _funPointsSize += 1; return this; } public ActionTween funPoint(float targetValue1, float targetValue2) { if (_funPointsSize == funPointsLimit) { return this; } funPoints[_funPointsSize * 2] = targetValue1; funPoints[_funPointsSize * 2 + 1] = targetValue2; _funPointsSize += 1; return this; } public ActionTween funPoint(float targetValue1, float targetValue2, float targetValue3) { if (_funPointsSize == funPointsLimit) { return this; } funPoints[_funPointsSize * 3] = targetValue1; funPoints[_funPointsSize * 3 + 1] = targetValue2; funPoints[_funPointsSize * 3 + 2] = targetValue3; _funPointsSize += 1; return this; } public ActionTween funPoint(float... targetValues) { if (_funPointsSize == funPointsLimit) { return this; } System.arraycopy(targetValues, 0, funPoints, _funPointsSize * targetValues.length, targetValues.length); _funPointsSize += 1; return this; } public ActionTween path(ActionPath path) { this.path = path; return this; } public ActionBind getTarget() { return _target; } public int getType() { return type; } public Easing getEasing() { return equation; } public float[] getTargetValues() { return targetValues; } public int getCombinedAttributesCount() { return _combinedAttrsSize; } @Override public void free() { pool.free(this); ActionControl.get().removeAllActions(_target); } @Override protected void initializeOverride() { if (_target == null) { return; } ActionType.getValues(_target, type, startValues); for (int i = 0; i < _combinedAttrsSize; i++) { targetValues[i] += isRelative ? startValues[i] : 0; for (int ii = 0; ii < _funPointsSize; ii++) { funPoints[ii * _combinedAttrsSize + i] += isRelative ? 
startValues[i] : 0; } if (isFrom) { float tmp = startValues[i]; startValues[i] = targetValues[i]; targetValues[i] = tmp; } } } private ActionEvent currentActionEvent; private Array<ActionEvent> repeatList; @Override protected boolean actionEventOver() { if (actionEvents == null) { return true; } if (actionEvents != null) { if (currentActionEvent != null && !currentActionEvent.isComplete()) { return false; } else if (currentActionEvent != null && currentActionEvent.isComplete()) { if (repeatList == null) { repeatList = new Array<ActionEvent>(); } if (!(currentActionEvent instanceof ReplayTo)) { repeatList.add(currentActionEvent.reverse()); } } ActionEvent event = actionEvents.first(); if (event != currentActionEvent && event != null) { actionEvents.remove(0); if (isRepeat) { if (event instanceof ReplayTo && repeatList != null && repeatList.size() > 0) { ReplayTo replayTo = ((ReplayTo) event); int size = replayTo.count - 1; if (size > 0) { for (int i = 0; i < size; i++) { repeatList.addFront(new ReplayTo(null)); repeatList.addFront(new DelayTo(0)); } } replayTo.set(repeatList); repeatList.clear(); } } ActionControl.get().addAction(event, _target); currentActionEvent = event; } } if (currentActionEvent != null && !currentActionEvent.isComplete()) { return false; } return (actionEvents == null || actionEvents.size() == 0); } @Override protected void update(int step, int lastStep, boolean isIterationStep, float delta) { if (_target == null || equation == null) { return; } if (!isIterationStep && step > lastStep) { ActionType.setValues(_target, type, isReverse(lastStep) ? startValues : targetValues); return; } if (!isIterationStep && step < lastStep) { ActionType.setValues(_target, type, isReverse(lastStep) ? targetValues : startValues); return; } if (duration < 0.00000000001f && delta > -0.00000000001f) { ActionType.setValues(_target, type, isReverse(step) ? 
targetValues : startValues); return; } if (duration < 0.00000000001f && delta < 0.00000000001f) { ActionType.setValues(_target, type, isReverse(step) ? startValues : targetValues); return; } float time = isReverse(step) ? duration - getCurrentTime() : getCurrentTime(); float t = equation.apply(time, duration, false); if (_funPointsSize == 0 || path == null) { for (int i = 0; i < _combinedAttrsSize; i++) { accessorBuffer[i] = startValues[i] + t * (targetValues[i] - startValues[i]); } } else { for (int i = 0; i < _combinedAttrsSize; i++) { pathBuffer[0] = startValues[i]; pathBuffer[1 + _funPointsSize] = targetValues[i]; for (int ii = 0; ii < _funPointsSize; ii++) { pathBuffer[ii + 1] = funPoints[ii * _combinedAttrsSize + i]; } accessorBuffer[i] = path.compute(t, pathBuffer, _funPointsSize + 2); } } ActionType.setValues(_target, type, accessorBuffer); } @Override protected void forceStartValues() { if (_target == null) { return; } ActionType.setValues(_target, type, startValues); } @Override protected void forceEndValues() { if (_target == null) { return; } ActionType.setValues(_target, type, targetValues); } @Override public ActionTween build() { if (_target == null) { return this; } _combinedAttrsSize = ActionType.getValues(_target, type, accessorBuffer); return this; } @Override protected boolean containsTarget(ActionBind target) { return this._target == target; } @Override protected boolean containsTarget(ActionBind target, int tweenType) { return this._target == target && this.type == tweenType; } @Override public String toString() { if (actionEvents == null) { return "ActionTween []"; } StringKeyValue builder = new StringKeyValue("ActionTween"); for (; actionEvents.hashNext();) { ActionEvent eve = actionEvents.next(); if (eve != null) { builder.addValue(eve.toString()); builder.newLine(); } } actionEvents.stopNext(); return builder.toString(); } }
13,102
886
//
// detail/chrono_time_traits.hpp
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2016 <NAME> (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//

#ifndef BOOST_ASIO_DETAIL_CHRONO_TIME_TRAITS_HPP
#define BOOST_ASIO_DETAIL_CHRONO_TIME_TRAITS_HPP

#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)

#include <boost/asio/detail/cstdint.hpp>

#include <boost/asio/detail/push_options.hpp>

namespace boost {
namespace asio {
namespace detail {

// Helper template to compute the greatest common divisor.
// (Compile-time Euclid's algorithm; terminates at the <v1, 0> partial
// specialization below.)
template <int64_t v1, int64_t v2>
struct gcd { enum { value = gcd<v2, v1 % v2>::value }; };

template <int64_t v1>
struct gcd<v1, 0> { enum { value = v1 }; };

// Adapts std::chrono clocks for use with a deadline timer.
template <typename Clock, typename WaitTraits>
struct chrono_time_traits
{
  // The clock type.
  typedef Clock clock_type;

  // The duration type of the clock.
  typedef typename clock_type::duration duration_type;

  // The time point type of the clock.
  typedef typename clock_type::time_point time_type;

  // The period of the clock.
  typedef typename duration_type::period period_type;

  // Get the current time.
  static time_type now()
  {
    return clock_type::now();
  }

  // Add a duration to a time.
  // Saturates at time_type::max()/min() rather than overflowing: the
  // comparisons below detect whether t + d would leave the representable
  // range before performing the addition.
  static time_type add(const time_type& t, const duration_type& d)
  {
    const time_type epoch;
    if (t >= epoch)
    {
      if ((time_type::max)() - t < d)
        return (time_type::max)();
    }
    else // t < epoch
    {
      if (-(t - (time_type::min)()) > d)
        return (time_type::min)();
    }

    return t + d;
  }

  // Subtract one time from another.
  // As with add(), the result saturates at duration_type::max()/min()
  // instead of overflowing; the branches distinguish the sign of each
  // operand relative to the (default-constructed) epoch.
  static duration_type subtract(const time_type& t1, const time_type& t2)
  {
    const time_type epoch;
    if (t1 >= epoch)
    {
      if (t2 >= epoch)
      {
        return t1 - t2;
      }
      else if (t2 == (time_type::min)())
      {
        // t1 - min() is not representable; clamp to max().
        return (duration_type::max)();
      }
      else if ((time_type::max)() - t1 < epoch - t2)
      {
        // t1 - t2 would exceed duration_type::max().
        return (duration_type::max)();
      }
      else
      {
        return t1 - t2;
      }
    }
    else // t1 < epoch
    {
      if (t2 < epoch)
      {
        return t1 - t2;
      }
      else if (t1 == (time_type::min)())
      {
        return (duration_type::min)();
      }
      else if ((time_type::max)() - t2 < epoch - t1)
      {
        return (duration_type::min)();
      }
      else
      {
        return -(t2 - t1);
      }
    }
  }

  // Test whether one time is less than another.
  static bool less_than(const time_type& t1, const time_type& t2)
  {
    return t1 < t2;
  }

  // Implement just enough of the posix_time::time_duration interface to supply
  // what the timer_queue requires.
  class posix_time_duration
  {
  public:
    explicit posix_time_duration(const duration_type& d)
      : d_(d)
    {
    }

    int64_t ticks() const
    {
      return d_.count();
    }

    int64_t total_seconds() const
    {
      return duration_cast<1, 1>();
    }

    int64_t total_milliseconds() const
    {
      return duration_cast<1, 1000>();
    }

    int64_t total_microseconds() const
    {
      return duration_cast<1, 1000000>();
    }

  private:
    // Convert the stored tick count into units of Num/Den seconds.
    // The clock-period / target-unit ratio is reduced at compile time with
    // gcd<> so the intermediate multiplications are less likely to overflow.
    template <int64_t Num, int64_t Den>
    int64_t duration_cast() const
    {
      const int64_t num1 = period_type::num / gcd<period_type::num, Num>::value;
      const int64_t num2 = Num / gcd<period_type::num, Num>::value;

      const int64_t den1 = period_type::den / gcd<period_type::den, Den>::value;
      const int64_t den2 = Den / gcd<period_type::den, Den>::value;

      const int64_t num = num1 * den2;
      const int64_t den = num2 * den1;

      if (num == 1 && den == 1)
        return ticks();
      else if (num != 1 && den == 1)
        return ticks() * num;
      else if (num == 1 && period_type::den != 1)
        // NOTE(review): upstream Boost.Asio tests `den != 1` here rather than
        // `period_type::den != 1`. The result is the same either way, because
        // with num == 1 the fall-through branch below computes
        // ticks() * 1 / den — flagged for fidelity only.
        return ticks() / den;
      else
        return ticks() * num / den;
    }

    duration_type d_;
  };

  // Convert to POSIX duration type.
  static posix_time_duration to_posix_duration(const duration_type& d)
  {
    return posix_time_duration(WaitTraits::to_wait_duration(d));
  }
};

} // namespace detail
} // namespace asio
} // namespace boost

#include <boost/asio/detail/pop_options.hpp>

#endif // BOOST_ASIO_DETAIL_CHRONO_TIME_TRAITS_HPP
1,904
391
import textwrap from dna_features_viewer.biotools import ( extract_graphical_translation, reverse_complement, find_narrowest_text_wrap, ) def test_extract_graphical_translation(): seq = "ATGGACAGAACAATATAA" seq1 = "ATGC" + seq + "GTTC" seq2 = "ATGC" + reverse_complement(seq) + "GTTC" assert extract_graphical_translation(seq1, (4, 22)) == "MDRTI*" assert extract_graphical_translation(seq2, (4, 22, -1)) == "MDRTI*"[::-1] def test_find_narrowest_text_wrap(): text = "Chloramphenicol resistance marker" naive_wrap = textwrap.wrap(text, 30) assert (len(naive_wrap) == 2) and len(naive_wrap[0]) == 26 narrow_wrap = find_narrowest_text_wrap(text, 30) lines = narrow_wrap.split("\n") assert len(lines) == 2 assert max(len(l) for l in lines) == 17
329
930
package vip.mate.core.rule.entity; import lombok.*; /** * 黑名单工具类 */ @Setter @Getter @Builder @NoArgsConstructor @AllArgsConstructor public class BlackList { /** * 主键ID */ private Long id; /** * IP地址 */ private String ip; /** * 请求uri */ private String requestUri; /** * 请求方法 */ private String requestMethod; /** * 开始时间 */ private String startTime; /** * 截止时间 */ private String endTime; /** * 黑名单状态:1:开启 0:关闭 */ private String status; /** * 创建时间 */ private String createTime; }
366
848
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
==============================================================================*/

// Driver for the Himax HM01B0 camera on the SparkFun Edge (Ambiq Apollo3).
// Register access goes over IOM/I2C; pixel data is read bit-banged from the
// parallel interface via fast GPIO.

#include "HM01B0.h"
#include "HM01B0_Walking1s_01.h"
#include "am_bsp.h"
#include "am_mcu_apollo.h"
#include "am_util.h"
#include "platform_Sparkfun_Edge.h"

//#define ENABLE_ASYNC

// VSYNC pin configuration. The interrupt direction is only configured when
// the asynchronous (interrupt-driven) capture path is compiled in.
const am_hal_gpio_pincfg_t g_HM01B0_pin_vsync = {
    .uFuncSel = 3,
    .eGPOutcfg = AM_HAL_GPIO_PIN_OUTCFG_DISABLE,
#ifdef ENABLE_ASYNC
    .eIntDir = AM_HAL_GPIO_PIN_INTDIR_BOTH,
#endif
    .eGPInput = AM_HAL_GPIO_PIN_INPUT_ENABLE,
    .eGPRdZero = AM_HAL_GPIO_PIN_RDZERO_READPIN};

// Camera interrupt pin configuration (currently unused; see commented-out
// setup in hm01b0_init_if).
const am_hal_gpio_pincfg_t g_HM01B0_pin_int = {
    .uFuncSel = 3,
    .eGPOutcfg = AM_HAL_GPIO_PIN_OUTCFG_DISABLE,
    .eIntDir = AM_HAL_GPIO_PIN_INTDIR_LO2HI,
    .eGPInput = AM_HAL_GPIO_PIN_INPUT_ENABLE,
    .eGPRdZero = AM_HAL_GPIO_PIN_RDZERO_READPIN};

#ifdef ENABLE_ASYNC
// Mirrors the current VSYNC level, updated from the GPIO ISR.
static bool s_bVsyncAsserted = false;

//*****************************************************************************
//
// GPIO ISR
//
//*****************************************************************************
static void hm01b0_gpio_isr(void) {
  //
  // Clear the GPIO Interrupt (write to clear).
  //
  am_hal_gpio_interrupt_clear(1 << HM01B0_PIN_VSYNC);

  if (read_vsync()) {
    s_bVsyncAsserted = true;
  } else {
    s_bVsyncAsserted = false;
  }
}
#endif

//*****************************************************************************
//
//! @brief Write HM01B0 registers
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param ui16Reg              - Register address.
//! @param pui8Value            - Pointer to the data to be written.
//! @param ui32NumBytes         - Length of the data in bytes to be written.
//!
//! This function writes value to HM01B0 registers.
//!
//! @return Error code.
//
//*****************************************************************************
static uint32_t hm01b0_write_reg(hm01b0_cfg_t* psCfg, uint16_t ui16Reg,
                                 uint8_t* pui8Value, uint32_t ui32NumBytes) {
  am_hal_iom_transfer_t Transaction;

  //
  // Create the transaction. The 16-bit register address is sent as the
  // instruction phase of the I2C transfer.
  //
  Transaction.ui32InstrLen = sizeof(uint16_t);
  Transaction.ui32Instr = (ui16Reg & 0x0000FFFF);
  Transaction.eDirection = AM_HAL_IOM_TX;
  Transaction.ui32NumBytes = ui32NumBytes;
  Transaction.pui32TxBuffer = (uint32_t*)pui8Value;
  Transaction.uPeerInfo.ui32I2CDevAddr = (uint32_t)psCfg->ui16SlvAddr;
  Transaction.bContinue = false;
  Transaction.ui8RepeatCount = 0;
  Transaction.ui32PauseCondition = 0;
  Transaction.ui32StatusSetClr = 0;

  //
  // Execute the transaction over IOM.
  //
  if (am_hal_iom_blocking_transfer(psCfg->pIOMHandle, &Transaction)) {
    return HM01B0_ERR_I2C;
  }

  return HM01B0_ERR_OK;
}

//*****************************************************************************
//
//! @brief Read HM01B0 registers
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param ui16Reg              - Register address.
//! @param pui8Value            - Pointer to the buffer for read data to be put
//!                               into.
//! @param ui32NumBytes         - Length of the data to be read.
//!
//! This function reads value from HM01B0 registers.
//!
//! @return Error code.
//
//*****************************************************************************
static uint32_t hm01b0_read_reg(hm01b0_cfg_t* psCfg, uint16_t ui16Reg,
                                uint8_t* pui8Value, uint32_t ui32NumBytes) {
  am_hal_iom_transfer_t Transaction;

  //
  // Create the transaction.
  //
  Transaction.ui32InstrLen = sizeof(uint16_t);
  Transaction.ui32Instr = (ui16Reg & 0x0000FFFF);
  Transaction.eDirection = AM_HAL_IOM_RX;
  Transaction.ui32NumBytes = ui32NumBytes;
  Transaction.pui32RxBuffer = (uint32_t*)pui8Value;
  ;
  Transaction.uPeerInfo.ui32I2CDevAddr = (uint32_t)psCfg->ui16SlvAddr;
  Transaction.bContinue = false;
  Transaction.ui8RepeatCount = 0;
  Transaction.ui32PauseCondition = 0;
  Transaction.ui32StatusSetClr = 0;

  //
  // Execute the transaction over IOM.
  //
  if (am_hal_iom_blocking_transfer(psCfg->pIOMHandle, &Transaction)) {
    return HM01B0_ERR_I2C;
  }

  return HM01B0_ERR_OK;
}

//*****************************************************************************
//
//! @brief Load HM01B0 a given script
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param psScrip              - Pointer to the script to be loaded.
//! @param ui32ScriptCmdNum     - Number of entries in a given script.
//!
//! This function loads HM01B0 a given script.
//!
//! @return Error code.
//
//*****************************************************************************
static uint32_t hm01b0_load_script(hm01b0_cfg_t* psCfg, hm_script_t* psScript,
                                   uint32_t ui32ScriptCmdNum) {
  uint32_t ui32Err = HM01B0_ERR_OK;
  // Write each (register, value) pair in order; stop at the first failure.
  for (uint32_t idx = 0; idx < ui32ScriptCmdNum; idx++) {
    ui32Err = hm01b0_write_reg(psCfg, (psScript + idx)->ui16Reg,
                               &((psScript + idx)->ui8Val), sizeof(uint8_t));
    if (ui32Err != HM01B0_ERR_OK) {
      break;
    }
  }

  return ui32Err;
}

//*****************************************************************************
//
//! @brief Power up HM01B0
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//!
//! This function powers up HM01B0.
//!
//! @return none.
//
//*****************************************************************************
void hm01b0_power_up(hm01b0_cfg_t* psCfg) {
  // place holder
}

//*****************************************************************************
//
//! @brief Power down HM01B0
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//!
//! This function powers down HM01B0.
//!
//! @return none.
//
//*****************************************************************************
void hm01b0_power_down(hm01b0_cfg_t* psCfg) {
  // place holder
}

//*****************************************************************************
//
//! @brief Enable MCLK
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//!
//! This function utilizes CTimer to generate MCLK for HM01B0.
//!
//! @return none.
//
//*****************************************************************************
void hm01b0_mclk_enable(hm01b0_cfg_t* psCfg) {
// Alternating 0101... pattern clocked out of the CTimer pin becomes the
// camera master clock.
#define MCLK_UI64PATTERN 0x55555555
#define MCLK_UI64PATTERNLEN 31

  am_hal_clkgen_control(AM_HAL_CLKGEN_CONTROL_SYSCLK_MAX, 0);

  //
  // Set up timer.
  //
  am_hal_ctimer_clear(psCfg->ui32CTimerModule, psCfg->ui32CTimerSegment);
  am_hal_ctimer_config_single(
      psCfg->ui32CTimerModule, psCfg->ui32CTimerSegment,
      (AM_HAL_CTIMER_FN_PTN_REPEAT | AM_HAL_CTIMER_HFRC_12MHZ));

  //
  // Set the pattern in the CMPR registers.
  //
  am_hal_ctimer_compare_set(psCfg->ui32CTimerModule, psCfg->ui32CTimerSegment,
                            0, (uint32_t)(MCLK_UI64PATTERN & 0xFFFF));
  am_hal_ctimer_compare_set(psCfg->ui32CTimerModule, psCfg->ui32CTimerSegment,
                            1, (uint32_t)((MCLK_UI64PATTERN >> 16) & 0xFFFF));

  //
  // Set the timer trigger and pattern length.
  //
  am_hal_ctimer_config_trigger(
      psCfg->ui32CTimerModule, psCfg->ui32CTimerSegment,
      ((MCLK_UI64PATTERNLEN << CTIMER_AUX0_TMRA0LMT_Pos) |
       (CTIMER_AUX0_TMRB0TRIG_DIS << CTIMER_AUX0_TMRA0TRIG_Pos)));

  //
  // Configure timer output pin.
  //
  am_hal_ctimer_output_config(psCfg->ui32CTimerModule, psCfg->ui32CTimerSegment,
                              psCfg->ui32CTimerOutputPin,
                              AM_HAL_CTIMER_OUTPUT_NORMAL,
                              AM_HAL_GPIO_PIN_DRIVESTRENGTH_12MA);

  //
  // Start the timer.
  //
  am_hal_ctimer_start(psCfg->ui32CTimerModule, psCfg->ui32CTimerSegment);
}

//*****************************************************************************
//
//! @brief Disable MCLK
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//!
//! This function disable CTimer to stop MCLK for HM01B0.
//!
//! @return none.
//
//*****************************************************************************
void hm01b0_mclk_disable(hm01b0_cfg_t* psCfg) {
  //
  // Stop the timer.
  //
  am_hal_ctimer_stop(psCfg->ui32CTimerModule, psCfg->ui32CTimerSegment);
  am_hal_gpio_pinconfig(psCfg->ui32CTimerOutputPin, g_AM_HAL_GPIO_DISABLE);
}

//*****************************************************************************
//
//! @brief Initialize interfaces
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//!
//! This function initializes interfaces.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_init_if(hm01b0_cfg_t* psCfg) {
  void* pIOMHandle = NULL;

  if (psCfg->ui32IOMModule > AM_REG_IOM_NUM_MODULES) {
    return HM01B0_ERR_I2C;
  }

  //
  // Enable fault detection.
  //
#if AM_APOLLO3_MCUCTRL
  am_hal_mcuctrl_control(AM_HAL_MCUCTRL_CONTROL_FAULT_CAPTURE_ENABLE, 0);
#else   // AM_APOLLO3_MCUCTRL
  am_hal_mcuctrl_fault_capture_enable();
#endif  // AM_APOLLO3_MCUCTRL

  //
  // Initialize the IOM instance.
  // Enable power to the IOM instance.
  // Configure the IOM for Serial operation during initialization.
  // Enable the IOM.
  //
  if (am_hal_iom_initialize(psCfg->ui32IOMModule, &pIOMHandle) ||
      am_hal_iom_power_ctrl(pIOMHandle, AM_HAL_SYSCTRL_WAKE, false) ||
      am_hal_iom_configure(pIOMHandle, &(psCfg->sIOMCfg)) ||
      am_hal_iom_enable(pIOMHandle)) {
    return HM01B0_ERR_I2C;
  } else {
    //
    // Configure the IOM pins.
    //
    am_bsp_iom_pins_enable(psCfg->ui32IOMModule, psCfg->eIOMMode);

    psCfg->pIOMHandle = pIOMHandle;
  }

  // initialize pins for camera parallel interface.
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD0);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD1);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD2);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD3);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD4);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD5);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD6);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD7);

  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD0);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD1);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD2);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD3);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD4);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD5);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD6);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD7);

  // All eight data pins become fast-GPIO inputs for the parallel bus.
  am_hal_gpio_fast_pinconfig(
      (uint64_t)0x1 << psCfg->ui8PinD0 | (uint64_t)0x1 << psCfg->ui8PinD1 |
          (uint64_t)0x1 << psCfg->ui8PinD2 | (uint64_t)0x1 << psCfg->ui8PinD3 |
          (uint64_t)0x1 << psCfg->ui8PinD4 | (uint64_t)0x1 << psCfg->ui8PinD5 |
          (uint64_t)0x1 << psCfg->ui8PinD6 | (uint64_t)0x1 << psCfg->ui8PinD7,
      g_AM_HAL_GPIO_INPUT, 0);

  am_hal_gpio_pinconfig(psCfg->ui8PinVSYNC, g_HM01B0_pin_vsync);
#ifdef ENABLE_ASYNC
  psCfg->pfnGpioIsr = hm01b0_gpio_isr;
  am_hal_gpio_interrupt_clear(AM_HAL_GPIO_BIT(psCfg->ui8PinVSYNC));
  am_hal_gpio_interrupt_enable(AM_HAL_GPIO_BIT(psCfg->ui8PinVSYNC));
  NVIC_EnableIRQ(GPIO_IRQn);
#endif
  am_hal_gpio_pinconfig(psCfg->ui8PinHSYNC, g_AM_HAL_GPIO_INPUT);
  am_hal_gpio_pinconfig(psCfg->ui8PinPCLK, g_AM_HAL_GPIO_INPUT);

  am_hal_gpio_pinconfig(psCfg->ui8PinTrig, g_AM_HAL_GPIO_OUTPUT);

  // Camera INT pin is left disabled; interrupt-driven setup kept for
  // reference below.
  am_hal_gpio_pinconfig(psCfg->ui8PinInt, g_AM_HAL_GPIO_DISABLE);
  // am_hal_gpio_pinconfig(psCfg->ui8PinInt, g_HM01B0_pin_int);
  // am_hal_gpio_interrupt_clear(AM_HAL_GPIO_BIT(psCfg->ui8PinInt));
  // am_hal_gpio_interrupt_enable(AM_HAL_GPIO_BIT(psCfg->ui8PinInt));
  // NVIC_EnableIRQ(GPIO_IRQn);

  return HM01B0_ERR_OK;
}

//*****************************************************************************
//
//! @brief Deinitialize interfaces
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//!
//! This function deinitializes interfaces.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_deinit_if(hm01b0_cfg_t* psCfg) {
  am_hal_iom_disable(psCfg->pIOMHandle);
  am_hal_iom_uninitialize(psCfg->pIOMHandle);

  am_hal_gpio_pinconfig(psCfg->ui8PinSCL, g_AM_HAL_GPIO_DISABLE);
  am_hal_gpio_pinconfig(psCfg->ui8PinSDA, g_AM_HAL_GPIO_DISABLE);

  // initialize pins for camera parallel interface.
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD0);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD1);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD2);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD3);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD4);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD5);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD6);
  am_hal_gpio_fastgpio_disable(psCfg->ui8PinD7);

  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD0);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD1);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD2);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD3);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD4);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD5);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD6);
  am_hal_gpio_fastgpio_clr(psCfg->ui8PinD7);

  am_hal_gpio_pinconfig(psCfg->ui8PinVSYNC, g_AM_HAL_GPIO_DISABLE);
#ifdef ENABLE_ASYNC
  NVIC_DisableIRQ(GPIO_IRQn);
  am_hal_gpio_interrupt_disable(AM_HAL_GPIO_BIT(psCfg->ui8PinVSYNC));
  am_hal_gpio_interrupt_clear(AM_HAL_GPIO_BIT(psCfg->ui8PinVSYNC));
  psCfg->pfnGpioIsr = NULL;
#endif
  am_hal_gpio_pinconfig(psCfg->ui8PinHSYNC, g_AM_HAL_GPIO_DISABLE);
  am_hal_gpio_pinconfig(psCfg->ui8PinPCLK, g_AM_HAL_GPIO_DISABLE);

  am_hal_gpio_pinconfig(psCfg->ui8PinTrig, g_AM_HAL_GPIO_DISABLE);
  am_hal_gpio_pinconfig(psCfg->ui8PinInt, g_AM_HAL_GPIO_DISABLE);

  return HM01B0_ERR_OK;
}

//*****************************************************************************
//
//! @brief Get HM01B0 Model ID
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param pui16MID             - Pointer to buffer for the read back model ID.
//!
//! This function reads back HM01B0 model ID.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_get_modelid(hm01b0_cfg_t* psCfg, uint16_t* pui16MID) {
  uint8_t ui8Data[1];
  uint32_t ui32Err;

  *pui16MID = 0x0000;

  // Model ID is split across two 8-bit registers (high byte, then low byte).
  ui32Err =
      hm01b0_read_reg(psCfg, HM01B0_REG_MODEL_ID_H, ui8Data, sizeof(ui8Data));
  if (ui32Err == HM01B0_ERR_OK) {
    *pui16MID |= (ui8Data[0] << 8);
  }

  ui32Err =
      hm01b0_read_reg(psCfg, HM01B0_REG_MODEL_ID_L, ui8Data, sizeof(ui8Data));
  if (ui32Err == HM01B0_ERR_OK) {
    *pui16MID |= ui8Data[0];
  }

  return ui32Err;
}

//*****************************************************************************
//
//! @brief Initialize HM01B0
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param psScript             - Pointer to HM01B0 initialization script.
//! @param ui32ScriptCmdNum     - No. of commands in HM01B0 initialization
//!                               script.
//!
//! This function initializes HM01B0 with a given script.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_init_system(hm01b0_cfg_t* psCfg, hm_script_t* psScript,
                            uint32_t ui32ScriptCmdNum) {
  return hm01b0_load_script(psCfg, psScript, ui32ScriptCmdNum);
}

//*****************************************************************************
//
//! @brief Set HM01B0 in the walking 1s test mode
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//!
//! This function sets HM01B0 in the walking 1s test mode.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_test_walking1s(hm01b0_cfg_t* psCfg) {
  uint32_t ui32ScriptCmdNum =
      sizeof(sHM01b0TestModeScript_Walking1s) / sizeof(hm_script_t);
  hm_script_t* psScript = (hm_script_t*)sHM01b0TestModeScript_Walking1s;

  return hm01b0_load_script(psCfg, psScript, ui32ScriptCmdNum);
}

//*****************************************************************************
//
//! @brief Check the data read from HM01B0 in the walking 1s test mode
//!
//! @param pui8Buffer           - Pointer to data buffer.
//! @param ui32BufferLen        - Buffer length
//! @param ui32PrintCnt         - Number of mismatched data to be printed out
//!
//! This function checks the data read from HM01B0 against the expected
//! walking 1s pattern and reports the mismatch count.
//!
//! @return Error code.
//
//*****************************************************************************
void hm01b0_test_walking1s_check_data_sanity(uint8_t* pui8Buffer,
                                             uint32_t ui32BufferLen,
                                             uint32_t ui32PrintCnt) {
  uint8_t ui8ByteData = *pui8Buffer;
  uint32_t ui32MismatchCnt = 0x00;

  for (uint32_t ui32Idx = 0; ui32Idx < ui32BufferLen; ui32Idx++) {
    if (*(pui8Buffer + ui32Idx) != ui8ByteData) {
      if (ui32PrintCnt) {
        am_util_stdio_printf("[0x%08X] actual 0x%02X expected 0x%02X\n",
                             ui32Idx, *(pui8Buffer + ui32Idx), ui8ByteData);
        am_util_delay_ms(1);

        ui32PrintCnt--;
      }

      ui32MismatchCnt++;
    }

    // Advance the expected value. Note: when ui8ByteData is 0x80 the shift
    // truncates to 0x00 in uint8_t, so the expected sequence here is
    // 0x01..0x80, 0x00, 0x01.. — i.e. a zero byte is expected between
    // wraps of the walking bit.
    if (ui8ByteData)
      ui8ByteData = ui8ByteData << 1;
    else
      ui8ByteData = 0x01;
  }

  am_util_stdio_printf("Mismatch Rate %d/%d\n", ui32MismatchCnt, ui32BufferLen);
}

//*****************************************************************************
//
//! @brief Software reset HM01B0
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//!
//! This function resets HM01B0 by issuing a reset command.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_reset_sw(hm01b0_cfg_t* psCfg) {
  uint8_t ui8Data[1] = {0x00};
  return hm01b0_write_reg(psCfg, HM01B0_REG_SW_RESET, ui8Data, sizeof(ui8Data));
}

//*****************************************************************************
//
//! @brief Get current HM01B0 operation mode.
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param pui8Mode             - Pointer to buffer
//!                             - for the read back operation mode to be put into
//!
//! This function get HM01B0 operation mode.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_get_mode(hm01b0_cfg_t* psCfg, uint8_t* pui8Mode) {
  uint8_t ui8Data[1] = {0x01};
  uint32_t ui32Err;

  ui32Err =
      hm01b0_read_reg(psCfg, HM01B0_REG_MODE_SELECT, ui8Data, sizeof(ui8Data));

  *pui8Mode = ui8Data[0];

  return ui32Err;
}

//*****************************************************************************
//
//! @brief Set HM01B0 operation mode.
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param ui8Mode              - Operation mode. One of:
//!     HM01B0_REG_MODE_SELECT_STANDBY
//!     HM01B0_REG_MODE_SELECT_STREAMING
//!     HM01B0_REG_MODE_SELECT_STREAMING_NFRAMES
//!     HM01B0_REG_MODE_SELECT_STREAMING_HW_TRIGGER
//! @param ui8FrameCnt          - Frame count for
//!                               HM01B0_REG_MODE_SELECT_STREAMING_NFRAMES.
//!                             - Discarded if other modes.
//!
//! This function set HM01B0 operation mode.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_set_mode(hm01b0_cfg_t* psCfg, uint8_t ui8Mode,
                         uint8_t ui8FrameCnt) {
  uint32_t ui32Err = HM01B0_ERR_OK;

  // The N-frames mode needs the frame count programmed first.
  if (ui8Mode == HM01B0_REG_MODE_SELECT_STREAMING_NFRAMES) {
    ui32Err = hm01b0_write_reg(psCfg, HM01B0_REG_PMU_PROGRAMMABLE_FRAMECNT,
                               &ui8FrameCnt, sizeof(ui8FrameCnt));
  }

  if (ui32Err == HM01B0_ERR_OK) {
    ui32Err = hm01b0_write_reg(psCfg, HM01B0_REG_MODE_SELECT, &ui8Mode,
                               sizeof(ui8Mode));
  }

  return ui32Err;
}

//*****************************************************************************
//
//! @brief Hardware trigger HM01B0 to stream.
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param bTrigger             - True to start streaming
//!                             - False to stop streaming
//!
//! This function triggers HM01B0 to stream by toggling the TRIG pin.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_hardware_trigger_streaming(hm01b0_cfg_t* psCfg, bool bTrigger) {
  uint32_t ui32Err = HM01B0_ERR_OK;
  uint8_t ui8Mode;

  // Only valid when the sensor is in the HW-trigger streaming mode.
  ui32Err = hm01b0_get_mode(psCfg, &ui8Mode);

  if (ui32Err != HM01B0_ERR_OK) goto end;

  if (ui8Mode != HM01B0_REG_MODE_SELECT_STREAMING_HW_TRIGGER) {
    ui32Err = HM01B0_ERR_MODE;
    goto end;
  }

  if (bTrigger) {
    am_hal_gpio_output_set(psCfg->ui8PinTrig);
  } else {
    am_hal_gpio_output_clear(psCfg->ui8PinTrig);
  }

end:
  return ui32Err;
}

//*****************************************************************************
//
//! @brief Set HM01B0 mirror mode.
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param bHmirror             - Horizontal mirror
//! @param bVmirror             - Vertical mirror
//!
//! This function set HM01B0 mirror mode.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_set_mirror(hm01b0_cfg_t* psCfg, bool bHmirror, bool bVmirror) {
  uint8_t ui8Data = 0x00;
  uint32_t ui32Err = HM01B0_ERR_OK;

  if (bHmirror) {
    ui8Data |= HM01B0_REG_IMAGE_ORIENTATION_HMIRROR;
  }

  if (bVmirror) {
    ui8Data |= HM01B0_REG_IMAGE_ORIENTATION_VMIRROR;
  }

  ui32Err = hm01b0_write_reg(psCfg, HM01B0_REG_IMAGE_ORIENTATION, &ui8Data,
                             sizeof(ui8Data));

  if (ui32Err == HM01B0_ERR_OK) {
    // Latch the orientation change via the group-parameter-hold register.
    ui8Data = HM01B0_REG_GRP_PARAM_HOLD_HOLD;
    ui32Err = hm01b0_write_reg(psCfg, HM01B0_REG_GRP_PARAM_HOLD, &ui8Data,
                               sizeof(ui8Data));
  }

  return ui32Err;
}

//*****************************************************************************
//
//! @brief Read data of one frame from HM01B0.
//!
//! @param psCfg                - Pointer to HM01B0 configuration structure.
//! @param pui8Buffer           - Pointer to the frame buffer.
//! @param ui32BufferLen        - Framebuffer size.
//!
//! This function reads data of one frame from HM01B0 by polling the
//! VSYNC/HSYNC/PCLK lines and sampling the parallel data bus.
//!
//! @return Error code.
//
//*****************************************************************************
uint32_t hm01b0_blocking_read_oneframe(hm01b0_cfg_t* psCfg, uint8_t* pui8Buffer,
                                       uint32_t ui32BufferLen) {
  uint32_t ui32Err = HM01B0_ERR_OK;
  uint32_t ui32Idx = 0x00;

  am_util_stdio_printf("[%s] +\n", __func__);
#ifdef ENABLE_ASYNC
  // Wait for the ISR-maintained VSYNC flag, then sample bytes on each PCLK
  // high phase for as long as VSYNC stays asserted.
  while (!s_bVsyncAsserted);

  while (s_bVsyncAsserted) {
    // we don't check HSYNC here on the basis of assuming HM01B0 in the gated
    // PCLK mode which PCLK toggles only when HSYNC is asserted. And also to
    // minimize the overhead of polling.

    if (read_pclk()) {
      *(pui8Buffer + ui32Idx++) = read_byte();

      if (ui32Idx == ui32BufferLen) {
        goto end;
      }

      // Wait for PCLK to fall before sampling the next byte.
      while (read_pclk());
    }
  }
#else
  // Synchronous path: count HSYNC pulses for each row and sample one byte
  // per PCLK rising edge while HSYNC is high.
  uint32_t ui32HsyncCnt = 0x00;

  while ((ui32HsyncCnt < HM01B0_PIXEL_Y_NUM)) {
    while (0x00 == read_hsync());

    // read one row
    while (read_hsync()) {
      while (0x00 == read_pclk());

      *(pui8Buffer + ui32Idx++) = read_byte();

      if (ui32Idx == ui32BufferLen) {
        goto end;
      }

      while (read_pclk());
    }

    ui32HsyncCnt++;
  }
#endif
end:
  am_util_stdio_printf("[%s] - Byte Counts %d\n", __func__, ui32Idx);

  return ui32Err;
}
10,418
5,169
{ "name": "YieldmoSDK", "version": "4.1.0", "summary": "Yieldmo iOS SDK to deliver ads to your Mobile app.", "description": "Yieldmo builds mobile advertising products driven by Design and Data. Our SDK makes this easier to integrate with your apps.", "homepage": "https://github.com/yieldmo/yieldmo-ios-sdk", "license": { "type": "commercial", "text": "Copyright 2016 Yieldmo, Inc. All rights reserved." }, "authors": "Yieldmo, Inc.", "platforms": { "ios": "8.0" }, "source": { "http": "https://github.com/yieldmo/yieldmo-ios-sdk/releases/download/v4.1.0/Yieldmo-iOS-SDK-v4.1.0.tar.gz" }, "preserve_paths": "Release/*.framework", "vendored_frameworks": "Release/Yieldmo.framework", "requires_arc": true }
286
731
#ifndef _BINDED_FUNC_HPP #define _BINDED_FUNC_HPP #include <memory> #include <string> #include "mode.hpp" namespace vind { class NTypeLogger ; class CharLogger ; class BindedFunc { private: struct Impl ; std::unique_ptr<Impl> pimpl ; virtual void error_process(const std::exception& e) const ; virtual void do_process() const = 0 ; virtual void do_process(NTypeLogger& parent_lgr) const = 0 ; virtual void do_process(const CharLogger& parent_lgr) const = 0 ; public: using SPtr = std::shared_ptr<BindedFunc> ; explicit BindedFunc() ; explicit BindedFunc(const std::string& name) ; explicit BindedFunc(std::string&& name) ; virtual ~BindedFunc() noexcept ; BindedFunc(BindedFunc&&) ; BindedFunc& operator=(BindedFunc&&) ; BindedFunc(const BindedFunc&) = delete ; BindedFunc& operator=(const BindedFunc&) = delete ; const std::string& name() const noexcept ; const std::size_t& id() const noexcept ; static std::size_t name_to_id(const std::string& name) noexcept { return std::hash<std::string>()(name) ; } static std::size_t name_to_id(std::string&& name) noexcept { return std::hash<std::string>()(std::move(name)) ; } void process() const ; void process(NTypeLogger& parent_lgr) const ; void process(const CharLogger& parent_lgr) const ; virtual bool is_for_moving_caret() const noexcept ; virtual bool is_for_changing_text() const noexcept ; virtual void reconstruct() ; bool operator==(const BindedFunc& rhs) const noexcept ; bool operator==(BindedFunc&& rhs) const noexcept ; bool operator!=(const BindedFunc& rhs) const noexcept ; bool operator!=(BindedFunc&& rhs) const noexcept ; } ; } #endif
827
674
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import logging
import xml.etree.ElementTree as ET

import arrow
from builtins import *

from nzbhydra.categories import getByNewznabCats
from nzbhydra.exceptions import IndexerResultParsingException
from nzbhydra.nzb_search_result import NzbSearchResult
from nzbhydra.search_module import IndexerProcessingResult
from nzbhydra.searchmodules import newznab

logger = logging.getLogger('root')


class Jackett(newznab.NewzNab):
    """Indexer module for Jackett, built on top of the newznab module.

    Jackett speaks a newznab-compatible (torznab) XML dialect, so this class
    reuses the NewzNab plumbing and overrides only URL building and result
    parsing.
    """
    # todo feature: read caps from server on first run and store them in the config/database

    def __init__(self, settings):
        # NOTE(review): both super() calls run __init__ twice up the MRO —
        # presumably intentional to initialize the grandparent first; confirm
        # against the newznab.NewzNab constructor before changing.
        super(newznab.NewzNab, self).__init__(settings)
        super(Jackett, self).__init__(settings)
        self.settings = settings  # Already done by super.__init__ but this way PyCharm knows the correct type
        self.module = "jackett"
        self.category_search = True
        self.supportedFilters = ["maxage"]
        self.supportsNot = False

    def get_details_link(self, guid):
        # For Jackett the GUID already is the details URL.
        return guid

    def get_entry_by_id(self, guid, title):
        # Jackett does not support fetching a single entry by id.
        self.error("Function not supported")
        return None

    def get_search_urls(self, search_request, search_type="search"):
        """Builds the list of search URLs (always one) for the given request.

        Only the query string and the "maxage" filter are forwarded; category
        and offset are handled by build_base_url.
        """
        f = self.build_base_url(search_type, search_request.category, offset=search_request.offset)
        query = search_request.query
        if query:
            f = f.add({"q": query})
        if search_request.maxage:
            f = f.add({"maxage": search_request.maxage})
        return [f.url]

    def process_query_result(self, xml_response, searchRequest, maxResults=None):
        """Parses the XML response and filters entries through accept_result.

        Returns an IndexerProcessingResult; rejected entries are tallied per
        rejection reason index in countRejected.
        """
        self.debug("Started processing results")
        countRejected = self.getRejectedCountDict()
        acceptedEntries = []
        entries, total, offset = self.parseXml(xml_response, maxResults)
        for entry in entries:
            accepted, reason, ri = self.accept_result(entry, searchRequest, self.supportedFilters)
            if accepted:
                acceptedEntries.append(entry)
            else:
                countRejected[ri] += 1
                self.debug("Rejected search result. Reason: %s" % reason)
        if total == 0 or len(acceptedEntries) == 0:
            self.info("Query returned no results")
            return IndexerProcessingResult(entries=acceptedEntries, queries=[], total=0, total_known=True, has_more=False, rejected=countRejected)
        else:
            return IndexerProcessingResult(entries=acceptedEntries, queries=[], total=total, total_known=True, has_more=False, rejected=countRejected)

    def parseXml(self, xmlResponse, maxResults=None):
        """Parses the raw XML into NzbSearchResult entries.

        Returns (entries, total, offset); total is just len(entries) and
        offset is always 0 because Jackett results are not paged here.
        Raises IndexerResultParsingException if the XML is malformed.
        """
        entries = []
        try:
            tree = ET.fromstring(xmlResponse.encode('utf-8'))
        except Exception:
            self.exception("Error parsing XML: %s..." % xmlResponse[:500])
            raise IndexerResultParsingException("Error parsing XML", self)
        for item in tree.find("channel").findall("item"):
            entry = self.parseItem(item)
            entries.append(entry)
            # Stop early once the caller-imposed result cap is reached.
            if maxResults is not None and len(entries) == maxResults:
                break
        return entries, len(entries), 0

    def parseItem(self, item):
        """Converts one RSS <item> element into an NzbSearchResult."""
        entry = self.create_nzb_search_result()
        # These are the values that absolutely must be contained in the response
        entry.title = item.find("title").text
        entry.title = self.cleanUpTitle(entry.title)
        entry.link = item.find("link").text
        entry.details_link = item.find("comments").text
        entry.indexerguid = item.find("guid").text
        entry.comments = 0
        size = item.find("size")
        if size is not None:
            entry.size = int(size.text)
        entry.attributes = []
        entry.has_nfo = NzbSearchResult.HAS_NFO_NO
        categories = item.find("category")
        if categories is not None:
            categories = categories.text
        # categories may still be None here; getByNewznabCats presumably
        # tolerates that — confirm in nzbhydra.categories.
        entry.category = getByNewznabCats(categories)
        # Collect both torznab and newznab attribute extensions.
        attributes = item.findall("torznab:attr", {"torznab": "http://torznab.com/schemas/2015/feed"})
        attributes.extend(item.findall("newznab:attr", {"newznab": "http://www.newznab.com/DTD/2010/feeds/attributes/"}))
        for i in attributes:
            attribute_name = i.attrib["name"]
            attribute_value = i.attrib["value"]
            entry.attributes.append({"name": attribute_name, "value": attribute_value})
            # size/grabs attributes override the plain <size> element if present.
            if attribute_name == "size":
                entry.size = int(attribute_value)
            if attribute_name == "grabs":
                entry.grabs = int(attribute_value)
        entry.pubDate = item.find("pubDate").text
        pubDate = arrow.get(entry.pubDate, 'ddd, DD MMM YYYY HH:mm:ss Z')
        self.getDates(entry, pubDate)
        entry.downloadType = "torrent"
        # For some trackers several results with the same ID are returned (e.g. PTP so we need to make sure the ID is unique)
        entry.indexerguid += str(entry.size)
        return entry

    def get_nfo(self, guid):
        # NFO retrieval is not available through Jackett.
        return False, None, "NFOs not supported by indexer"


def get_instance(indexer):
    # Factory hook used by the search-module loader.
    return Jackett(indexer)
2,181
679
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/
#ifndef SW_VBA_VIEW_HXX
#define SW_VBA_VIEW_HXX

#include <ooo/vba/word/XView.hpp>
#include <vbahelper/vbahelperinterface.hxx>
#include <com/sun/star/text/XTextViewCursor.hpp>
#include <com/sun/star/text/XTextRange.hpp>
#include <com/sun/star/text/XText.hpp>
#include <com/sun/star/beans/XPropertySet.hpp>

// Helper-interface base that wires XView into the VBA helper framework.
typedef InheritedHelperInterfaceImpl1< ooo::vba::word::XView > SwVbaView_BASE;

// VBA-compatible wrapper around a Writer document view (word.View object).
// Bridges VBA view properties (SeekView, SplitSpecial, TableGridLines, Type)
// to the underlying UNO model, view cursor and view settings.
class SwVbaView : public SwVbaView_BASE
{
private:
    css::uno::Reference< css::frame::XModel > mxModel;
    css::uno::Reference< css::text::XTextViewCursor > mxViewCursor;
    css::uno::Reference< css::beans::XPropertySet > mxViewSettings;

    // Returns the header/footer text range for the given seek-view type
    // (nType) — used by the SeekView property implementation.
    css::uno::Reference< css::text::XTextRange > getHFTextRange( sal_Int32 nType ) throw (css::uno::RuntimeException);
    // Returns the position of the first object inside xText; semantics
    // defined in the implementation file.
    css::uno::Reference< css::text::XTextRange > getFirstObjectPosition( const css::uno::Reference< css::text::XText >& xText ) throw (css::uno::RuntimeException);

public:
    SwVbaView( const css::uno::Reference< ooo::vba::XHelperInterface >& rParent, const css::uno::Reference< css::uno::XComponentContext >& rContext,
        const css::uno::Reference< css::frame::XModel >& rModel ) throw ( css::uno::RuntimeException );
    virtual ~SwVbaView();

    // XView — getter/setter pairs for the VBA View properties.
    virtual ::sal_Int32 SAL_CALL getSeekView() throw (css::uno::RuntimeException);
    virtual void SAL_CALL setSeekView( ::sal_Int32 _seekview ) throw (css::uno::RuntimeException);
    virtual ::sal_Int32 SAL_CALL getSplitSpecial() throw (css::uno::RuntimeException);
    virtual void SAL_CALL setSplitSpecial( ::sal_Int32 _splitspecial ) throw (css::uno::RuntimeException);
    virtual ::sal_Bool SAL_CALL getTableGridLines() throw (css::uno::RuntimeException);
    virtual void SAL_CALL setTableGridLines( ::sal_Bool _tablegridlines ) throw (css::uno::RuntimeException);
    virtual ::sal_Int32 SAL_CALL getType() throw (css::uno::RuntimeException);
    virtual void SAL_CALL setType( ::sal_Int32 _type ) throw (css::uno::RuntimeException);

    // XHelperInterface — service identity used by the VBA helper framework.
    virtual rtl::OUString& getServiceImplName();
    virtual css::uno::Sequence<rtl::OUString> getServiceNames();
};
#endif /* SW_VBA_VIEW_HXX */
1,024
678
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import os
from typing import Any, Dict, Generator, Type, cast

import pytest
import timm
from _pytest.monkeypatch import MonkeyPatch
from omegaconf import OmegaConf
from pytorch_lightning import LightningDataModule, Trainer
from torch.nn.modules import Module

from torchgeo.datamodules import (
    BigEarthNetDataModule,
    EuroSATDataModule,
    RESISC45DataModule,
    So2SatDataModule,
    UCMercedDataModule,
)
from torchgeo.trainers import ClassificationTask, MultiLabelClassificationTask

from .test_utils import ClassificationTestModel


def create_model(*args: Any, **kwargs: Any) -> Module:
    # Stand-in for timm.create_model so tests run a tiny model instead of a
    # real backbone (monkeypatched in below).
    return ClassificationTestModel(**kwargs)


class TestClassificationTask:
    """Tests for the single-label ClassificationTask trainer."""

    @pytest.mark.parametrize(
        "name,classname",
        [
            ("eurosat", EuroSATDataModule),
            ("resisc45", RESISC45DataModule),
            ("so2sat_supervised", So2SatDataModule),
            ("so2sat_unsupervised", So2SatDataModule),
            ("ucmerced", UCMercedDataModule),
        ],
    )
    def test_trainer(
        self,
        monkeypatch: Generator[MonkeyPatch, None, None],
        name: str,
        classname: Type[LightningDataModule],
    ) -> None:
        # End-to-end fit+test smoke run for each datamodule config.
        if name.startswith("so2sat"):
            pytest.importorskip("h5py")

        conf = OmegaConf.load(os.path.join("tests", "conf", name + ".yaml"))
        conf_dict = OmegaConf.to_object(conf.experiment)
        conf_dict = cast(Dict[Any, Dict[Any, Any]], conf_dict)

        # Instantiate datamodule
        datamodule_kwargs = conf_dict["datamodule"]
        datamodule = classname(**datamodule_kwargs)

        # Instantiate model
        monkeypatch.setattr(  # type: ignore[attr-defined]
            timm, "create_model", create_model
        )
        model_kwargs = conf_dict["module"]
        model = ClassificationTask(**model_kwargs)

        # Instantiate trainer
        trainer = Trainer(fast_dev_run=True, log_every_n_steps=1)
        trainer.fit(model=model, datamodule=datamodule)
        trainer.test(model=model, datamodule=datamodule)

    def test_no_logger(self) -> None:
        # Training must work with logging disabled entirely.
        conf = OmegaConf.load(os.path.join("tests", "conf", "ucmerced.yaml"))
        conf_dict = OmegaConf.to_object(conf.experiment)
        conf_dict = cast(Dict[Any, Dict[Any, Any]], conf_dict)

        # Instantiate datamodule
        datamodule_kwargs = conf_dict["datamodule"]
        datamodule = UCMercedDataModule(**datamodule_kwargs)

        # Instantiate model
        model_kwargs = conf_dict["module"]
        model = ClassificationTask(**model_kwargs)

        # Instantiate trainer
        trainer = Trainer(logger=None, fast_dev_run=True, log_every_n_steps=1)
        trainer.fit(model=model, datamodule=datamodule)

    @pytest.fixture
    def model_kwargs(self) -> Dict[Any, Any]:
        # Minimal valid kwargs; individual tests mutate single entries to
        # trigger specific validation errors.
        return {
            "classification_model": "resnet18",
            "in_channels": 1,
            "loss": "ce",
            "num_classes": 1,
            "weights": "random",
        }

    def test_pretrained(self, model_kwargs: Dict[Any, Any], checkpoint: str) -> None:
        # Loading weights from a checkpoint should only emit a warning.
        model_kwargs["weights"] = checkpoint
        with pytest.warns(UserWarning):
            ClassificationTask(**model_kwargs)

    def test_invalid_pretrained(
        self, model_kwargs: Dict[Any, Any], checkpoint: str
    ) -> None:
        # Mismatched backbone vs. checkpoint must raise.
        model_kwargs["weights"] = checkpoint
        model_kwargs["classification_model"] = "resnet50"
        match = "Trying to load resnet18 weights into a resnet50"
        with pytest.raises(ValueError, match=match):
            ClassificationTask(**model_kwargs)

    def test_invalid_loss(self, model_kwargs: Dict[Any, Any]) -> None:
        model_kwargs["loss"] = "invalid_loss"
        match = "Loss type 'invalid_loss' is not valid."
        with pytest.raises(ValueError, match=match):
            ClassificationTask(**model_kwargs)

    def test_invalid_model(self, model_kwargs: Dict[Any, Any]) -> None:
        model_kwargs["classification_model"] = "invalid_model"
        match = "Model type 'invalid_model' is not a valid timm model."
        with pytest.raises(ValueError, match=match):
            ClassificationTask(**model_kwargs)

    def test_invalid_weights(self, model_kwargs: Dict[Any, Any]) -> None:
        model_kwargs["weights"] = "invalid_weights"
        match = "Weight type 'invalid_weights' is not valid."
        with pytest.raises(ValueError, match=match):
            ClassificationTask(**model_kwargs)


class TestMultiLabelClassificationTask:
    """Tests for the multi-label classification trainer (BigEarthNet)."""

    @pytest.mark.parametrize(
        "name,classname",
        [
            ("bigearthnet_all", BigEarthNetDataModule),
            ("bigearthnet_s1", BigEarthNetDataModule),
            ("bigearthnet_s2", BigEarthNetDataModule),
        ],
    )
    def test_trainer(
        self,
        monkeypatch: Generator[MonkeyPatch, None, None],
        name: str,
        classname: Type[LightningDataModule],
    ) -> None:
        # End-to-end fit+test smoke run for each BigEarthNet config.
        conf = OmegaConf.load(os.path.join("tests", "conf", name + ".yaml"))
        conf_dict = OmegaConf.to_object(conf.experiment)
        conf_dict = cast(Dict[Any, Dict[Any, Any]], conf_dict)

        # Instantiate datamodule
        datamodule_kwargs = conf_dict["datamodule"]
        datamodule = classname(**datamodule_kwargs)

        # Instantiate model
        monkeypatch.setattr(  # type: ignore[attr-defined]
            timm, "create_model", create_model
        )
        model_kwargs = conf_dict["module"]
        model = MultiLabelClassificationTask(**model_kwargs)

        # Instantiate trainer
        trainer = Trainer(fast_dev_run=True, log_every_n_steps=1)
        trainer.fit(model=model, datamodule=datamodule)
        trainer.test(model=model, datamodule=datamodule)

    def test_no_logger(self) -> None:
        # Training must work with logging disabled entirely.
        conf = OmegaConf.load(os.path.join("tests", "conf", "bigearthnet_s1.yaml"))
        conf_dict = OmegaConf.to_object(conf.experiment)
        conf_dict = cast(Dict[Any, Dict[Any, Any]], conf_dict)

        # Instantiate datamodule
        datamodule_kwargs = conf_dict["datamodule"]
        datamodule = BigEarthNetDataModule(**datamodule_kwargs)

        # Instantiate model
        model_kwargs = conf_dict["module"]
        model = MultiLabelClassificationTask(**model_kwargs)

        # Instantiate trainer
        trainer = Trainer(logger=None, fast_dev_run=True, log_every_n_steps=1)
        trainer.fit(model=model, datamodule=datamodule)

    @pytest.fixture
    def model_kwargs(self) -> Dict[Any, Any]:
        # Minimal valid kwargs for the multi-label task.
        return {
            "classification_model": "resnet18",
            "in_channels": 1,
            "loss": "ce",
            "num_classes": 1,
            "weights": "random",
        }

    def test_invalid_loss(self, model_kwargs: Dict[Any, Any]) -> None:
        model_kwargs["loss"] = "invalid_loss"
        match = "Loss type 'invalid_loss' is not valid."
        with pytest.raises(ValueError, match=match):
            MultiLabelClassificationTask(**model_kwargs)
3,077
3,651
package com.orientechnologies.common.comparator;

import com.orientechnologies.orient.core.index.OCompositeKey;
import com.orientechnologies.orient.core.metadata.schema.OType;
import java.util.Comparator;
import org.junit.Assert;
import org.junit.Test;

/**
 * Verifies that {@link ODefaultComparator} orders composite string keys
 * consistently: strictly less-than, strictly greater-than when swapped, and
 * equal when compared against itself.
 *
 * @author <NAME> (a.lomakin-at-orientdb.com)
 * @author <NAME>
 * @since 11.07.12
 */
public class DefaultComparatorTest {
  private ODefaultComparator comparator = ODefaultComparator.INSTANCE;

  @Test
  public void testCompareStrings() {
    final OCompositeKey smaller = new OCompositeKey("name4", OType.STRING);
    final OCompositeKey larger = new OCompositeKey("name5", OType.STRING);

    verifyOrdering(comparator, smaller, larger);
  }

  /**
   * Asserts the full ordering contract between two keys where {@code lesser}
   * is expected to sort strictly before {@code greater}.
   */
  private void verifyOrdering(
      final Comparator<Object> cmp, final Object lesser, final Object greater) {
    Assert.assertTrue(cmp.compare(lesser, greater) < 0);
    Assert.assertTrue(cmp.compare(greater, lesser) > 0);
    Assert.assertTrue(cmp.compare(greater, greater) == 0);
  }
}
366
2,151
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser; import android.content.Context; import android.content.res.Resources; import android.util.TypedValue; import org.chromium.base.CommandLine; import org.chromium.chrome.R; import org.chromium.content_public.common.ContentSwitches; /** * Utility class for application level initialization calls. */ public final class ApplicationInitialization { // Prevent instantiation. private ApplicationInitialization() { } /** * Enable fullscreen related startup flags. * @param resources Resources to use while calculating initialization constants. * @param resControlContainerHeight The resource id for the height of the browser controls. */ public static void enableFullscreenFlags( Resources resources, Context context, int resControlContainerHeight) { CommandLine commandLine = CommandLine.getInstance(); if (commandLine.hasSwitch(ChromeSwitches.DISABLE_FULLSCREEN)) return; TypedValue threshold = new TypedValue(); resources.getValue(R.dimen.top_controls_show_threshold, threshold, true); commandLine.appendSwitchWithValue( ContentSwitches.TOP_CONTROLS_SHOW_THRESHOLD, threshold.coerceToString().toString()); resources.getValue(R.dimen.top_controls_hide_threshold, threshold, true); commandLine.appendSwitchWithValue( ContentSwitches.TOP_CONTROLS_HIDE_THRESHOLD, threshold.coerceToString().toString()); } }
535
2,161
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg.spark.source; import java.io.IOException; import java.math.RoundingMode; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.iceberg.AssertHelpers; import org.apache.iceberg.DataFile; import org.apache.iceberg.FileFormat; import org.apache.iceberg.FileScanTask; import org.apache.iceberg.ManifestFile; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.Table; import org.apache.iceberg.TableProperties; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.io.CloseableIterable; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.relocated.com.google.common.math.LongMath; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; import org.apache.spark.sql.SparkSession; 
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import static org.apache.iceberg.types.Types.NestedField.optional;

/**
 * Integration tests for Iceberg's Spark data source read/write options,
 * verifying how per-operation options interact with (and override) table
 * properties: file format, Hadoop config, split sizes, incremental scans and
 * snapshot summary metadata.
 */
public class TestDataSourceOptions {

  private static final Configuration CONF = new Configuration();
  // Two-column schema (id, data) shared by all tests.
  private static final Schema SCHEMA = new Schema(
      optional(1, "id", Types.IntegerType.get()),
      optional(2, "data", Types.StringType.get())
  );
  private static SparkSession spark = null;

  @Rule
  public TemporaryFolder temp = new TemporaryFolder();

  @BeforeClass
  public static void startSpark() {
    // One local SparkSession shared by all tests in this class.
    TestDataSourceOptions.spark = SparkSession.builder().master("local[2]").getOrCreate();
  }

  @AfterClass
  public static void stopSpark() {
    // Null the static field before stopping so no test can observe a
    // half-stopped session.
    SparkSession currentSpark = TestDataSourceOptions.spark;
    TestDataSourceOptions.spark = null;
    currentSpark.stop();
  }

  // write-format option must win over the table's default file format.
  @Test
  public void testWriteFormatOptionOverridesTableProperties() throws IOException {
    String tableLocation = temp.newFolder("iceberg-table").toString();

    HadoopTables tables = new HadoopTables(CONF);
    PartitionSpec spec = PartitionSpec.unpartitioned();
    Map<String, String> options = Maps.newHashMap();
    options.put(TableProperties.DEFAULT_FILE_FORMAT, "avro");
    Table table = tables.create(SCHEMA, spec, options, tableLocation);

    List<SimpleRecord> expectedRecords = Lists.newArrayList(
        new SimpleRecord(1, "a"),
        new SimpleRecord(2, "b"),
        new SimpleRecord(3, "c")
    );
    Dataset<Row> df = spark.createDataFrame(expectedRecords, SimpleRecord.class);
    df.select("id", "data").write()
        .format("iceberg")
        .option(SparkWriteOptions.WRITE_FORMAT, "parquet")
        .mode(SaveMode.Append)
        .save(tableLocation);

    try (CloseableIterable<FileScanTask> tasks = table.newScan().planFiles()) {
      tasks.forEach(task -> {
        FileFormat fileFormat = FileFormat.fromFileName(task.file().path());
        Assert.assertEquals(FileFormat.PARQUET, fileFormat);
      });
    }
  }

  // Without a write-format option, the table's default format applies.
  @Test
  public void testNoWriteFormatOption() throws IOException {
    String tableLocation = temp.newFolder("iceberg-table").toString();

    HadoopTables tables = new HadoopTables(CONF);
    PartitionSpec spec = PartitionSpec.unpartitioned();
    Map<String, String> options = Maps.newHashMap();
    options.put(TableProperties.DEFAULT_FILE_FORMAT, "avro");
    Table table = tables.create(SCHEMA, spec, options, tableLocation);

    List<SimpleRecord> expectedRecords = Lists.newArrayList(
        new SimpleRecord(1, "a"),
        new SimpleRecord(2, "b"),
        new SimpleRecord(3, "c")
    );
    Dataset<Row> df = spark.createDataFrame(expectedRecords, SimpleRecord.class);
    df.select("id", "data").write()
        .format("iceberg")
        .mode("append")
        .save(tableLocation);

    try (CloseableIterable<FileScanTask> tasks = table.newScan().planFiles()) {
      tasks.forEach(task -> {
        FileFormat fileFormat = FileFormat.fromFileName(task.file().path());
        Assert.assertEquals(FileFormat.AVRO, fileFormat);
      });
    }
  }

  // "hadoop."-prefixed data source options must override the session's
  // Hadoop configuration for both reads and writes.
  @Test
  public void testHadoopOptions() throws IOException {
    String tableLocation = temp.newFolder("iceberg-table").toString();
    Configuration sparkHadoopConf = spark.sessionState().newHadoopConf();
    String originalDefaultFS = sparkHadoopConf.get("fs.default.name");

    try {
      HadoopTables tables = new HadoopTables(CONF);
      PartitionSpec spec = PartitionSpec.unpartitioned();
      Map<String, String> options = Maps.newHashMap();
      tables.create(SCHEMA, spec, options, tableLocation);

      // set an invalid value for 'fs.default.name' in Spark Hadoop config
      // to verify that 'hadoop.' data source options are propagated correctly
      sparkHadoopConf.set("fs.default.name", "hdfs://localhost:9000");

      List<SimpleRecord> expectedRecords = Lists.newArrayList(
          new SimpleRecord(1, "a"),
          new SimpleRecord(2, "b")
      );
      Dataset<Row> originalDf = spark.createDataFrame(expectedRecords, SimpleRecord.class);
      originalDf.select("id", "data").write()
          .format("iceberg")
          .mode("append")
          .option("hadoop.fs.default.name", "file:///")
          .save(tableLocation);

      Dataset<Row> resultDf = spark.read()
          .format("iceberg")
          .option("hadoop.fs.default.name", "file:///")
          .load(tableLocation);
      List<SimpleRecord> resultRecords = resultDf.orderBy("id")
          .as(Encoders.bean(SimpleRecord.class))
          .collectAsList();

      Assert.assertEquals("Records should match", expectedRecords, resultRecords);
    } finally {
      // Restore the session Hadoop config so later tests are unaffected.
      sparkHadoopConf.set("fs.default.name", originalDefaultFS);
    }
  }

  // A read-time split-size option must override TableProperties.SPLIT_SIZE.
  @Test
  public void testSplitOptionsOverridesTableProperties() throws IOException {
    String tableLocation = temp.newFolder("iceberg-table").toString();

    HadoopTables tables = new HadoopTables(CONF);
    PartitionSpec spec = PartitionSpec.unpartitioned();
    Map<String, String> options = Maps.newHashMap();
    options.put(TableProperties.SPLIT_SIZE, String.valueOf(128L * 1024 * 1024)); // 128Mb
    options.put(TableProperties.DEFAULT_FILE_FORMAT, String.valueOf(FileFormat.AVRO)); // Arbitrarily splittable
    Table icebergTable = tables.create(SCHEMA, spec, options, tableLocation);

    List<SimpleRecord> expectedRecords = Lists.newArrayList(
        new SimpleRecord(1, "a"),
        new SimpleRecord(2, "b")
    );
    Dataset<Row> originalDf = spark.createDataFrame(expectedRecords, SimpleRecord.class);
    originalDf.select("id", "data")
        .repartition(1)
        .write()
        .format("iceberg")
        .mode("append")
        .save(tableLocation);

    List<DataFile> files = Lists.newArrayList(icebergTable.currentSnapshot().addedFiles());
    Assert.assertEquals("Should have written 1 file", 1, files.size());

    long fileSize = files.get(0).fileSizeInBytes();
    // Half the file size (rounded up) so the single file yields two splits.
    long splitSize = LongMath.divide(fileSize, 2, RoundingMode.CEILING);

    Dataset<Row> resultDf = spark.read()
        .format("iceberg")
        .option(SparkReadOptions.SPLIT_SIZE, String.valueOf(splitSize))
        .load(tableLocation);

    Assert.assertEquals("Spark partitions should match", 2, resultDf.javaRDD().getNumPartitions());
  }

  // Validates option combinations for incremental scans plus the actual
  // (start, end] snapshot-range read semantics.
  @Test
  public void testIncrementalScanOptions() throws IOException {
    String tableLocation = temp.newFolder("iceberg-table").toString();

    HadoopTables tables = new HadoopTables(CONF);
    PartitionSpec spec = PartitionSpec.unpartitioned();
    Map<String, String> options = Maps.newHashMap();
    Table table = tables.create(SCHEMA, spec, options, tableLocation);

    List<SimpleRecord> expectedRecords = Lists.newArrayList(
        new SimpleRecord(1, "a"),
        new SimpleRecord(2, "b"),
        new SimpleRecord(3, "c"),
        new SimpleRecord(4, "d")
    );
    // One append per record => four snapshots.
    for (SimpleRecord record : expectedRecords) {
      Dataset<Row> originalDf = spark.createDataFrame(Lists.newArrayList(record), SimpleRecord.class);
      originalDf.select("id", "data").write()
          .format("iceberg")
          .mode("append")
          .save(tableLocation);
    }
    // Ancestor ids are newest-first, so index 3 is the oldest snapshot.
    List<Long> snapshotIds = SnapshotUtil.currentAncestorIds(table);

    // start-snapshot-id and snapshot-id are both configured.
    AssertHelpers.assertThrows(
        "Check both start-snapshot-id and snapshot-id are configured",
        IllegalArgumentException.class,
        "Cannot specify start-snapshot-id and end-snapshot-id to do incremental scan",
        () -> {
          spark.read()
              .format("iceberg")
              .option("snapshot-id", snapshotIds.get(3).toString())
              .option("start-snapshot-id", snapshotIds.get(3).toString())
              .load(tableLocation).explain();
        });

    // end-snapshot-id and as-of-timestamp are both configured.
    AssertHelpers.assertThrows(
        "Check both start-snapshot-id and snapshot-id are configured",
        IllegalArgumentException.class,
        "Cannot specify start-snapshot-id and end-snapshot-id to do incremental scan",
        () -> {
          spark.read()
              .format("iceberg")
              .option(SparkReadOptions.AS_OF_TIMESTAMP,
                  Long.toString(table.snapshot(snapshotIds.get(3)).timestampMillis()))
              .option("end-snapshot-id", snapshotIds.get(2).toString())
              .load(tableLocation).explain();
        });

    // only end-snapshot-id is configured.
    AssertHelpers.assertThrows(
        "Check both start-snapshot-id and snapshot-id are configured",
        IllegalArgumentException.class,
        "Cannot only specify option end-snapshot-id to do incremental scan",
        () -> {
          spark.read()
              .format("iceberg")
              .option("end-snapshot-id", snapshotIds.get(2).toString())
              .load(tableLocation).explain();
        });

    // test (1st snapshot, current snapshot] incremental scan.
    List<SimpleRecord> result = spark.read()
        .format("iceberg")
        .option("start-snapshot-id", snapshotIds.get(3).toString())
        .load(tableLocation)
        .orderBy("id")
        .as(Encoders.bean(SimpleRecord.class))
        .collectAsList();
    Assert.assertEquals("Records should match", expectedRecords.subList(1, 4), result);

    // test (2nd snapshot, 3rd snapshot] incremental scan.
    List<SimpleRecord> result1 = spark.read()
        .format("iceberg")
        .option("start-snapshot-id", snapshotIds.get(2).toString())
        .option("end-snapshot-id", snapshotIds.get(1).toString())
        .load(tableLocation)
        .orderBy("id")
        .as(Encoders.bean(SimpleRecord.class))
        .collectAsList();
    Assert.assertEquals("Records should match", expectedRecords.subList(2, 3), result1);
  }

  // split-size read option must override METADATA_SPLIT_SIZE when scanning
  // metadata tables like "#entries".
  @Test
  public void testMetadataSplitSizeOptionOverrideTableProperties() throws IOException {
    String tableLocation = temp.newFolder("iceberg-table").toString();

    HadoopTables tables = new HadoopTables(CONF);
    PartitionSpec spec = PartitionSpec.unpartitioned();
    Map<String, String> options = Maps.newHashMap();
    Table table = tables.create(SCHEMA, spec, options, tableLocation);

    List<SimpleRecord> expectedRecords = Lists.newArrayList(
        new SimpleRecord(1, "a"),
        new SimpleRecord(2, "b")
    );
    Dataset<Row> originalDf = spark.createDataFrame(expectedRecords, SimpleRecord.class);
    // produce 1st manifest
    originalDf.select("id", "data").write()
        .format("iceberg")
        .mode("append")
        .save(tableLocation);
    // produce 2nd manifest
    originalDf.select("id", "data").write()
        .format("iceberg")
        .mode("append")
        .save(tableLocation);

    List<ManifestFile> manifests = table.currentSnapshot().allManifests();

    Assert.assertEquals("Must be 2 manifests", 2, manifests.size());

    // set the target metadata split size so each manifest ends up in a separate split
    table.updateProperties()
        .set(TableProperties.METADATA_SPLIT_SIZE, String.valueOf(manifests.get(0).length()))
        .commit();

    Dataset<Row> entriesDf = spark.read()
        .format("iceberg")
        .load(tableLocation + "#entries");
    Assert.assertEquals("Num partitions must match", 2, entriesDf.javaRDD().getNumPartitions());

    // override the table property using options
    entriesDf = spark.read()
        .format("iceberg")
        .option(SparkReadOptions.SPLIT_SIZE, String.valueOf(128 * 1024 * 1024))
        .load(tableLocation + "#entries");
    Assert.assertEquals("Num partitions must match", 1, entriesDf.javaRDD().getNumPartitions());
  }

  // With no overrides, metadata tables split by METADATA_SPLIT_SIZE_DEFAULT.
  @Test
  public void testDefaultMetadataSplitSize() throws IOException {
    String tableLocation = temp.newFolder("iceberg-table").toString();

    HadoopTables tables = new HadoopTables(CONF);
    PartitionSpec spec = PartitionSpec.unpartitioned();
    Map<String, String> options = Maps.newHashMap();
    tables.create(SCHEMA, spec, options, tableLocation);

    List<SimpleRecord> expectedRecords = Lists.newArrayList(
        new SimpleRecord(1, "a"),
        new SimpleRecord(2, "b")
    );
    Dataset<Row> originalDf = spark.createDataFrame(expectedRecords, SimpleRecord.class);
    originalDf.select("id", "data").write()
        .format("iceberg")
        .mode("append")
        .save(tableLocation);

    int splitSize = (int) TableProperties.METADATA_SPLIT_SIZE_DEFAULT; // 32MB split size
    // Ceiling division: number of splits needed to cover the manifest length.
    int expectedSplits = ((int) tables.load(tableLocation + "#entries")
        .currentSnapshot().allManifests().get(0).length() + splitSize - 1) / splitSize;

    Dataset<Row> metadataDf = spark.read()
        .format("iceberg")
        .load(tableLocation + "#entries");

    int partitionNum = metadataDf.javaRDD().getNumPartitions();
    Assert.assertEquals("Spark partitions should match", expectedSplits, partitionNum);
  }

  // snapshot-property.* write options must land in the snapshot summary.
  @Test
  public void testExtraSnapshotMetadata() throws IOException {
    String tableLocation = temp.newFolder("iceberg-table").toString();
    HadoopTables tables = new HadoopTables(CONF);
    tables.create(SCHEMA, PartitionSpec.unpartitioned(), Maps.newHashMap(), tableLocation);

    List<SimpleRecord> expectedRecords = Lists.newArrayList(
        new SimpleRecord(1, "a"),
        new SimpleRecord(2, "b")
    );
    Dataset<Row> originalDf = spark.createDataFrame(expectedRecords, SimpleRecord.class);
    originalDf.select("id", "data").write()
        .format("iceberg")
        .mode("append")
        .option(SparkWriteOptions.SNAPSHOT_PROPERTY_PREFIX + ".extra-key", "someValue")
        .option(SparkWriteOptions.SNAPSHOT_PROPERTY_PREFIX + ".another-key", "anotherValue")
        .save(tableLocation);

    Table table = tables.load(tableLocation);

    Assert.assertTrue(table.currentSnapshot().summary().get("extra-key").equals("someValue"));
    Assert.assertTrue(table.currentSnapshot().summary().get("another-key").equals("anotherValue"));
  }
}
5,982
2,372
<reponame>ceti-dev/PhysX<filename>physx/source/physxcooking/src/convex/BigConvexDataBuilder.h // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of NVIDIA CORPORATION nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ''AS IS'' AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY // OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Copyright (c) 2008-2021 NVIDIA Corporation. All rights reserved. // Copyright (c) 2004-2008 AGEIA Technologies, Inc. All rights reserved. // Copyright (c) 2001-2004 NovodeX AG. All rights reserved. 
#ifndef BIG_CONVEX_DATA_BUILDER_H
#define BIG_CONVEX_DATA_BUILDER_H

#include "foundation/PxMemory.h"
#include "PsVecMath.h"

namespace physx
{
	// Forward declarations — full definitions live elsewhere in the cooking/geometry sources.
	struct HullTriangleData;
	class BigConvexData;
	class ConvexHullBuilder;

	//////////////////////////////////////////////////////////////////////////

	//! Valencies creation structure: input parameters for BigConvexDataBuilder::compute().
	struct ValenciesCreate
	{
		//! Constructor — zero-initializes every member.
		ValenciesCreate()	{ PxMemZero(this, sizeof(*this)); }

		PxU32			nbVerts;		//!< Number of vertices
		PxU32			nbFaces;		//!< Number of faces
		const PxU32*	dFaces;			//!< List of faces (triangle list, 32-bit indices)
		const PxU16*	wFaces;			//!< List of faces (triangle list, 16-bit indices)
		bool			adjacentList;	//!< Compute list of adjacent vertices or not
	};

	//////////////////////////////////////////////////////////////////////////

	// Builds the support-vertex map and vertex-valency data (BigConvexData) for a convex hull.
	class BigConvexDataBuilder : public Ps::UserAllocated
	{
	public:
		BigConvexDataBuilder(const Gu::ConvexHullData* hull, BigConvexData* gm, const PxVec3* hullVerts);
		~BigConvexDataBuilder();

		// Support vertex map
		bool	precompute(PxU32 subdiv);
		bool	initialize();
		bool	save(PxOutputStream& stream, bool platformMismatch)	const;
		bool	computeValencies(const ConvexHullBuilder& meshBuilder);
		//~Support vertex map

		// Valencies
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		/**
		 * Computes valencies and adjacent vertices.
		 * After the call, get results with the appropriate accessors.
		 *
		 * \param vc [in] creation structure
		 * \return true if success.
		 */
		///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
		bool	compute(const ValenciesCreate& vc)	const;

		bool	saveValencies(PxOutputStream& stream, bool platformMismatch)	const;
		//~Valencies
	protected:
		PX_FORCE_INLINE	void precomputeSample(const PxVec3& dir, PxU8& startIndex, float negativeDir);

	private:
		// Raw pointers supplied at construction; presumably not owned by the builder — TODO confirm.
		const Gu::ConvexHullData*	mHull;
		BigConvexData*				mSVM;
		const PxVec3*				mHullVerts;
	};
}

#endif // BIG_CONVEX_DATA_BUILDER_H
1,297
336
<gh_stars>100-1000 #pragma once #include <Framework.h> #include "GraphBase.h" #include <Oscilloscope/Math/ChannelMath.h> class CWndOscGraph : public CWndGraph, public CMathChannel { ui16 m_arrAverageBuf[DivsX*BlkX]; bool m_bPersistReset; bool m_bNeedRedraw; void _PrepareColumn( ui16 *column, ui16 n, ui16 clr ); ui16 _Interpolate( ui16 clrA, ui16 clrB ); public: CWndOscGraph(); virtual void Create(CWnd *pParent, int dwFlags); virtual void OnMessage(CWnd* pSender, int code, uintptr_t data) override; virtual void OnPaint() override; void SetupMarkers( CSettings::Calibrator::FastCalc& Ch1fast, CSettings::Calibrator::FastCalc& Ch2fast, int& nMarkerT1, int& nMarkerT2, int& nMarkerY1, int& nMarkerY2 ); void SetupSelection( bool& bSelection, int& nMarkerT1, int& nMarkerT2 ); void GetCurrentRange(int& nBegin, int& nEnd); void ClearAverage(); void ClearPersist(); void OnPaintTY(); void OnPaintXY(); };
407
881
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS
Community Edition) available.
Copyright (C) 2017-2021 <NAME>, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# Fix: removed a stray "<gh_stars>" dataset marker that made this module a syntax error.
from gcloud.utils.validate import ObjectJsonBodyValidator


class SetEnabledForPeriodicTaskValidator(ObjectJsonBodyValidator):
    """Validates the body of a set-enabled request for a periodic task.

    Requires ``enabled`` to be a real boolean (not a truthy string/int),
    on top of the base JSON-body validation.
    Returns ``(is_valid, error_message)``.
    """

    def validate(self, request, *args, **kwargs):
        valid, err = super().validate(request, *args, **kwargs)
        if not valid:
            return valid, err

        if not isinstance(self.data.get("enabled"), bool):
            return False, "enabled must be a bool"

        return True, ""


class ModifyCronValidator(ObjectJsonBodyValidator):
    """Validates the body of a modify-cron request.

    Requires a non-empty ``cron`` field on top of the base JSON-body
    validation. Returns ``(is_valid, error_message)``.
    """

    def validate(self, request, *args, **kwargs):
        valid, err = super().validate(request, *args, **kwargs)
        if not valid:
            return valid, err

        if not self.data.get("cron"):
            return False, "cron can not be empty"

        return True, ""


class ModifyConstantsValidator(ObjectJsonBodyValidator):
    """Validates the body of a modify-constants request.

    Requires ``constants`` to be a JSON object (dict) on top of the base
    JSON-body validation. Returns ``(is_valid, error_message)``.
    """

    def validate(self, request, *args, **kwargs):
        valid, err = super().validate(request, *args, **kwargs)
        if not valid:
            return valid, err

        if not isinstance(self.data.get("constants"), dict):
            return False, "constants must be a object"

        return True, ""
665
861
package cn.springcloud.gray.server.dao.repository;

import cn.springcloud.gray.server.dao.model.AuthorityDO;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.stereotype.Repository;

/**
 * Spring Data JPA repository for {@link AuthorityDO} entities, with
 * {@link JpaSpecificationExecutor} support for criteria-based queries.
 *
 * @author saleson
 * @date 2020-03-21 22:07
 */
@Repository
public interface AuthorityRepository extends JpaRepository<AuthorityDO, Long>, JpaSpecificationExecutor<AuthorityDO> {

    /**
     * Looks up the first authority record matching the given role and resource
     * (Spring Data derived query; returns {@code null} when no row matches).
     *
     * @param role     role identifier to match
     * @param resource resource identifier to match
     * @return the first matching {@link AuthorityDO}, or {@code null} if none
     */
    AuthorityDO findFirstByRoleAndResource(String role, String resource);
}
177
1,408
/*
 * Copyright 2021 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */

#ifndef PLAT_NV_STRG_H
#define PLAT_NV_STRG_H

/* Fix: include <stdint.h> explicitly for uint8_t instead of relying on a
 * transitive include from whichever header happened to precede this one.
 * (Also removed a stray "<filename>" dataset marker before the license.)
 */
#include <stdint.h>

#define DEFAULT_SET_VALUE	0xA1
#define READY_TO_WRITE_VALUE	0xFF

#ifndef NV_STORAGE_BASE_ADDR
#define NV_STORAGE_BASE_ADDR	DEFAULT_NV_STORAGE_BASE_ADDR
#endif

/* Application data persisted in non-volatile storage. */
typedef struct {
	uint8_t warm_rst_flag;	/* warm-reset indicator */
	uint8_t wdt_rst_flag;	/* watchdog-reset indicator */
	uint8_t dummy[2];	/* unused; presumably padding — TODO confirm */
} nv_app_data_t;

/* Below enum and above structure should be in-sync: each enumerator is the
 * byte offset of the corresponding field in nv_app_data_t.
 */
enum app_data_offset {
	WARM_RESET_FLAG_OFFSET,
	WDT_RESET_FLAG_OFFSET,
	APP_DATA_MAX_OFFSET,
};

/* Reads the NV application data block into the runtime copy; returns 0 on success. */
int read_nv_app_data(void);

/* Writes data_size bytes of data at data_offset within the NV block; returns 0 on success. */
int wr_nv_app_data(int data_offset, uint8_t *data, int data_size);

/* Returns a read-only view of the cached NV application data. */
const nv_app_data_t *get_nv_data(void);

#endif /* PLAT_NV_STRG_H */
357
4,129
<gh_stars>1000+ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: ResourcesInternalLegacy.proto package com.didiglobal.booster.aapt2.legacy; public final class ResourcesInternalLegacy { private ResourcesInternalLegacy() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } public interface ConfigDescriptionOrBuilder extends // @@protoc_insertion_point(interface_extends:aapt.pb.internal.ConfigDescription) com.google.protobuf.MessageOrBuilder { /** * <code>bytes data = 1;</code> * @return The data. */ com.google.protobuf.ByteString getData(); /** * <code>string product = 2;</code> * @return The product. */ java.lang.String getProduct(); /** * <code>string product = 2;</code> * @return The bytes for product. */ com.google.protobuf.ByteString getProductBytes(); } /** * <pre> * A configuration description that wraps the binary form of the C++ class * aapt::ConfigDescription, with an added product definition. * TODO(adamlesinski): Flesh this out to be represented in proto. * </pre> * * Protobuf type {@code aapt.pb.internal.ConfigDescription} */ public static final class ConfigDescription extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:aapt.pb.internal.ConfigDescription) ConfigDescriptionOrBuilder { private static final long serialVersionUID = 0L; // Use ConfigDescription.newBuilder() to construct. 
private ConfigDescription(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ConfigDescription() { data_ = com.google.protobuf.ByteString.EMPTY; product_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ConfigDescription(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ConfigDescription( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { data_ = input.readBytes(); break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); product_ = s; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_ConfigDescription_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_ConfigDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.class, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.Builder.class); } public static final int DATA_FIELD_NUMBER = 1; private com.google.protobuf.ByteString data_; /** * <code>bytes data = 1;</code> * @return The data. */ @java.lang.Override public com.google.protobuf.ByteString getData() { return data_; } public static final int PRODUCT_FIELD_NUMBER = 2; private volatile java.lang.Object product_; /** * <code>string product = 2;</code> * @return The product. */ @java.lang.Override public java.lang.String getProduct() { java.lang.Object ref = product_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); product_ = s; return s; } } /** * <code>string product = 2;</code> * @return The bytes for product. 
*/ @java.lang.Override public com.google.protobuf.ByteString getProductBytes() { java.lang.Object ref = product_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); product_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!data_.isEmpty()) { output.writeBytes(1, data_); } if (!getProductBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, product_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!data_.isEmpty()) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, data_); } if (!getProductBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, product_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription)) { return super.equals(obj); } com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription other = (com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription) obj; if (!getData() .equals(other.getData())) return false; if (!getProduct() .equals(other.getProduct())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 
0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DATA_FIELD_NUMBER; hash = (53 * hash) + getData().hashCode(); hash = (37 * hash) + PRODUCT_FIELD_NUMBER; hash = (53 * hash) + getProduct().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A configuration description that wraps the binary form of the C++ class * aapt::ConfigDescription, with an added product definition. * TODO(adamlesinski): Flesh this out to be represented in proto. * </pre> * * Protobuf type {@code aapt.pb.internal.ConfigDescription} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:aapt.pb.internal.ConfigDescription) com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescriptionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_ConfigDescription_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_ConfigDescription_fieldAccessorTable .ensureFieldAccessorsInitialized( com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.class, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.Builder.class); } // Construct using com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); data_ = com.google.protobuf.ByteString.EMPTY; product_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_ConfigDescription_descriptor; } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription getDefaultInstanceForType() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.getDefaultInstance(); } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription build() { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription buildPartial() { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription result = new com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription(this); result.data_ = data_; result.product_ = product_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public 
Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription) { return mergeFrom((com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription other) { if (other == com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.getDefaultInstance()) return this; if (other.getData() != com.google.protobuf.ByteString.EMPTY) { setData(other.getData()); } if (!other.getProduct().isEmpty()) { product_ = other.product_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY; /** * 
<code>bytes data = 1;</code> * @return The data. */ @java.lang.Override public com.google.protobuf.ByteString getData() { return data_; } /** * <code>bytes data = 1;</code> * @param value The data to set. * @return This builder for chaining. */ public Builder setData(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } data_ = value; onChanged(); return this; } /** * <code>bytes data = 1;</code> * @return This builder for chaining. */ public Builder clearData() { data_ = getDefaultInstance().getData(); onChanged(); return this; } private java.lang.Object product_ = ""; /** * <code>string product = 2;</code> * @return The product. */ public java.lang.String getProduct() { java.lang.Object ref = product_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); product_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>string product = 2;</code> * @return The bytes for product. */ public com.google.protobuf.ByteString getProductBytes() { java.lang.Object ref = product_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); product_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>string product = 2;</code> * @param value The product to set. * @return This builder for chaining. */ public Builder setProduct( java.lang.String value) { if (value == null) { throw new NullPointerException(); } product_ = value; onChanged(); return this; } /** * <code>string product = 2;</code> * @return This builder for chaining. */ public Builder clearProduct() { product_ = getDefaultInstance().getProduct(); onChanged(); return this; } /** * <code>string product = 2;</code> * @param value The bytes for product to set. * @return This builder for chaining. 
*/ public Builder setProductBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); product_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:aapt.pb.internal.ConfigDescription) } // @@protoc_insertion_point(class_scope:aapt.pb.internal.ConfigDescription) private static final com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription(); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ConfigDescription> PARSER = new com.google.protobuf.AbstractParser<ConfigDescription>() { @java.lang.Override public ConfigDescription parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ConfigDescription(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ConfigDescription> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ConfigDescription> getParserForType() { return PARSER; } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface CompiledFileLegacyOrBuilder extends // @@protoc_insertion_point(interface_extends:aapt.pb.internal.CompiledFileLegacy) 
com.google.protobuf.MessageOrBuilder { /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The resourceName. */ java.lang.String getResourceName(); /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The bytes for resourceName. */ com.google.protobuf.ByteString getResourceNameBytes(); /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> * @return Whether the config field is set. */ boolean hasConfig(); /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> * @return The config. */ com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription getConfig(); /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescriptionOrBuilder getConfigOrBuilder(); /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @return The sourcePath. */ java.lang.String getSourcePath(); /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @return The bytes for sourcePath. */ com.google.protobuf.ByteString getSourcePathBytes(); } /** * <pre> * The top level message representing an external resource file (layout XML, PNG, etc). * This is used to represent a compiled file before it is linked. Only useful to aapt2. 
* </pre> * * Protobuf type {@code aapt.pb.internal.CompiledFileLegacy} */ public static final class CompiledFileLegacy extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:aapt.pb.internal.CompiledFileLegacy) CompiledFileLegacyOrBuilder { private static final long serialVersionUID = 0L; // Use CompiledFileLegacy.newBuilder() to construct. private CompiledFileLegacy(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CompiledFileLegacy() { resourceName_ = ""; sourcePath_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CompiledFileLegacy(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CompiledFileLegacy( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); resourceName_ = s; break; } case 18: { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.Builder subBuilder = null; if (config_ != null) { subBuilder = config_.toBuilder(); } config_ = input.readMessage(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(config_); config_ = subBuilder.buildPartial(); } break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); sourcePath_ = s; break; } default: { if (!parseUnknownField( input, 
unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_fieldAccessorTable .ensureFieldAccessorsInitialized( com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.class, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Builder.class); } public interface SymbolOrBuilder extends // @@protoc_insertion_point(interface_extends:aapt.pb.internal.CompiledFileLegacy.Symbol) com.google.protobuf.MessageOrBuilder { /** * <pre> * The name of the symbol (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The resourceName. */ java.lang.String getResourceName(); /** * <pre> * The name of the symbol (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The bytes for resourceName. */ com.google.protobuf.ByteString getResourceNameBytes(); /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> * @return Whether the source field is set. */ boolean hasSource(); /** * <pre> * The position in the file at which this symbol is defined. For debug use. 
* </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> * @return The source. */ com.didiglobal.booster.aapt2.Resources.SourcePosition getSource(); /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ com.didiglobal.booster.aapt2.Resources.SourcePositionOrBuilder getSourceOrBuilder(); } /** * Protobuf type {@code aapt.pb.internal.CompiledFileLegacy.Symbol} */ public static final class Symbol extends com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:aapt.pb.internal.CompiledFileLegacy.Symbol)
SymbolOrBuilder {
// NOTE(review): protoc-generated nested message (resource_name + optional source position). Do not hand-edit; regenerate from the .proto definition instead.
private static final long serialVersionUID = 0L;
// Use Symbol.newBuilder() to construct.
private Symbol(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Symbol() { resourceName_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new Symbol(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Symbol( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); resourceName_ = s; break; } case 18: { com.didiglobal.booster.aapt2.Resources.SourcePosition.Builder subBuilder = null; if (source_ != null) { subBuilder = source_.toBuilder(); } source_ = input.readMessage(com.didiglobal.booster.aapt2.Resources.SourcePosition.parser(),
extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(source_); source_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_fieldAccessorTable .ensureFieldAccessorsInitialized( com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol.class, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol.Builder.class); } public static final int RESOURCE_NAME_FIELD_NUMBER = 1; private volatile java.lang.Object resourceName_; /** * <pre> * The name of the symbol (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * The name of the symbol (in the form package:type/name).
* </pre> * * <code>string resource_name = 1;</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SOURCE_FIELD_NUMBER = 2; private com.didiglobal.booster.aapt2.Resources.SourcePosition source_; /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> * @return Whether the source field is set. */ @java.lang.Override public boolean hasSource() { return source_ != null; } /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> * @return The source. */ @java.lang.Override public com.didiglobal.booster.aapt2.Resources.SourcePosition getSource() { return source_ == null ? com.didiglobal.booster.aapt2.Resources.SourcePosition.getDefaultInstance() : source_; } /** * <pre> * The position in the file at which this symbol is defined. For debug use.
* </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ @java.lang.Override public com.didiglobal.booster.aapt2.Resources.SourcePositionOrBuilder getSourceOrBuilder() { return getSource(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getResourceNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (source_ != null) { output.writeMessage(2, getSource()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getResourceNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (source_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getSource()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol)) { return super.equals(obj); } com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol other = (com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasSource() != other.hasSource()) return false; if (hasSource()) { if (!getSource() .equals(other.getSource())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode
!= 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasSource()) { hash = (37 * hash) + SOURCE_FIELD_NUMBER; hash = (53 * hash) + getSource().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data,
extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code aapt.pb.internal.CompiledFileLegacy.Symbol} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:aapt.pb.internal.CompiledFileLegacy.Symbol)
com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.SymbolOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_fieldAccessorTable .ensureFieldAccessorsInitialized( com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol.class, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol.Builder.class); }
// Construct using com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if
(com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); resourceName_ = ""; if (sourceBuilder_ == null) { source_ = null; } else { source_ = null; sourceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_descriptor; } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol getDefaultInstanceForType() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol.getDefaultInstance(); } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol build() { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol buildPartial() { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol result = new com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol(this); result.resourceName_ = resourceName_; if (sourceBuilder_ == null) { result.source_ = source_; } else { result.source_ = sourceBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder
clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol) { return mergeFrom((com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol other) { if (other == com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; onChanged(); } if (other.hasSource()) { mergeSource(other.getSource()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if
(parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object resourceName_ = ""; /** * <pre> * The name of the symbol (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The name of the symbol (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The name of the symbol (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; onChanged(); return this; } /** * <pre> * The name of the symbol (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); onChanged(); return this; } /** * <pre> * The name of the symbol (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @param value The bytes for resourceName to set. * @return This builder for chaining.
*/ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; onChanged(); return this; } private com.didiglobal.booster.aapt2.Resources.SourcePosition source_; private com.google.protobuf.SingleFieldBuilderV3< com.didiglobal.booster.aapt2.Resources.SourcePosition, com.didiglobal.booster.aapt2.Resources.SourcePosition.Builder, com.didiglobal.booster.aapt2.Resources.SourcePositionOrBuilder> sourceBuilder_; /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> * @return Whether the source field is set. */ public boolean hasSource() { return sourceBuilder_ != null || source_ != null; } /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> * @return The source. */ public com.didiglobal.booster.aapt2.Resources.SourcePosition getSource() { if (sourceBuilder_ == null) { return source_ == null ? com.didiglobal.booster.aapt2.Resources.SourcePosition.getDefaultInstance() : source_; } else { return sourceBuilder_.getMessage(); } } /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ public Builder setSource(com.didiglobal.booster.aapt2.Resources.SourcePosition value) { if (sourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { sourceBuilder_.setMessage(value); } return this; } /** * <pre> * The position in the file at which this symbol is defined. For debug use.
* </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ public Builder setSource( com.didiglobal.booster.aapt2.Resources.SourcePosition.Builder builderForValue) { if (sourceBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { sourceBuilder_.setMessage(builderForValue.build()); } return this; } /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ public Builder mergeSource(com.didiglobal.booster.aapt2.Resources.SourcePosition value) { if (sourceBuilder_ == null) { if (source_ != null) { source_ = com.didiglobal.booster.aapt2.Resources.SourcePosition.newBuilder(source_).mergeFrom(value).buildPartial(); } else { source_ = value; } onChanged(); } else { sourceBuilder_.mergeFrom(value); } return this; } /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ public Builder clearSource() { if (sourceBuilder_ == null) { source_ = null; onChanged(); } else { source_ = null; sourceBuilder_ = null; } return this; } /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ public com.didiglobal.booster.aapt2.Resources.SourcePosition.Builder getSourceBuilder() { onChanged(); return getSourceFieldBuilder().getBuilder(); } /** * <pre> * The position in the file at which this symbol is defined. For debug use. * </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ public com.didiglobal.booster.aapt2.Resources.SourcePositionOrBuilder getSourceOrBuilder() { if (sourceBuilder_ != null) { return sourceBuilder_.getMessageOrBuilder(); } else { return source_ == null ? com.didiglobal.booster.aapt2.Resources.SourcePosition.getDefaultInstance() : source_; } } /** * <pre> * The position in the file at which this symbol is defined. For debug use.
* </pre> * * <code>.aapt.pb.SourcePosition source = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.didiglobal.booster.aapt2.Resources.SourcePosition, com.didiglobal.booster.aapt2.Resources.SourcePosition.Builder, com.didiglobal.booster.aapt2.Resources.SourcePositionOrBuilder> getSourceFieldBuilder() { if (sourceBuilder_ == null) { sourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.didiglobal.booster.aapt2.Resources.SourcePosition, com.didiglobal.booster.aapt2.Resources.SourcePosition.Builder, com.didiglobal.booster.aapt2.Resources.SourcePositionOrBuilder>( getSource(), getParentForChildren(), isClean()); source_ = null; } return sourceBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:aapt.pb.internal.CompiledFileLegacy.Symbol)
}
// @@protoc_insertion_point(class_scope:aapt.pb.internal.CompiledFileLegacy.Symbol)
private static final com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol(); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Symbol> PARSER = new com.google.protobuf.AbstractParser<Symbol>() { @java.lang.Override public Symbol parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Symbol(input, extensionRegistry); } }; public static
com.google.protobuf.Parser<Symbol> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Symbol> getParserForType() { return PARSER; } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Symbol getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public static final int RESOURCE_NAME_FIELD_NUMBER = 1; private volatile java.lang.Object resourceName_; /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CONFIG_FIELD_NUMBER = 2; private com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription config_; /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> * @return Whether the config field is set. */ @java.lang.Override public boolean hasConfig() { return config_ != null; } /** * <pre> * The configuration for which the resource is defined.
* </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> * @return The config. */ @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription getConfig() { return config_ == null ? com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.getDefaultInstance() : config_; } /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescriptionOrBuilder getConfigOrBuilder() { return getConfig(); }
// NOTE(review): protoc-generated accessors/serialization for CompiledFileLegacy — do not hand-edit; regenerate from the .proto definition instead.
public static final int SOURCE_PATH_FIELD_NUMBER = 3; private volatile java.lang.Object sourcePath_; /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @return The sourcePath. */ @java.lang.Override public java.lang.String getSourcePath() { java.lang.Object ref = sourcePath_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourcePath_ = s; return s; } } /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @return The bytes for sourcePath.
*/ @java.lang.Override public com.google.protobuf.ByteString getSourcePathBytes() { java.lang.Object ref = sourcePath_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); sourcePath_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getResourceNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (config_ != null) { output.writeMessage(2, getConfig()); } if (!getSourcePathBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, sourcePath_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getResourceNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } if (config_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getConfig()); } if (!getSourcePathBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, sourcePath_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy)) { return super.equals(obj); } com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy other =
(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasConfig() != other.hasConfig()) return false; if (hasConfig()) { if (!getConfig() .equals(other.getConfig())) return false; } if (!getSourcePath() .equals(other.getSourcePath())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasConfig()) { hash = (37 * hash) + CONFIG_FIELD_NUMBER; hash = (53 * hash) + getConfig().hashCode(); } hash = (37 * hash) + SOURCE_PATH_FIELD_NUMBER; hash = (53 * hash) + getSourcePath().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return
com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The top level message representing an external resource file (layout XML, PNG, etc). * This is used to represent a compiled file before it is linked. Only useful to aapt2.
* </pre> * * Protobuf type {@code aapt.pb.internal.CompiledFileLegacy} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:aapt.pb.internal.CompiledFileLegacy) com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_fieldAccessorTable .ensureFieldAccessorsInitialized( com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.class, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.Builder.class); } // Construct using com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); resourceName_ = ""; if (configBuilder_ == null) { config_ = null; } else { config_ = null; configBuilder_ = null; } sourcePath_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.internal_static_aapt_pb_internal_CompiledFileLegacy_descriptor; } @java.lang.Override public 
com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy getDefaultInstanceForType() { return com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.getDefaultInstance(); } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy build() { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy buildPartial() { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy result = new com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy(this); result.resourceName_ = resourceName_; if (configBuilder_ == null) { result.config_ = config_; } else { result.config_ = configBuilder_.build(); } result.sourcePath_ = sourcePath_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy) { return mergeFrom((com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy other) { if (other == com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; onChanged(); } if (other.hasConfig()) { mergeConfig(other.getConfig()); } if (!other.getSourcePath().isEmpty()) { sourcePath_ = other.sourcePath_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object resourceName_ = ""; /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The resourceName. 
*/ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @param value The resourceName to set. * @return This builder for chaining. */ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; onChanged(); return this; } /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); onChanged(); return this; } /** * <pre> * The name of the resource (in the form package:type/name). * </pre> * * <code>string resource_name = 1;</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. 
*/ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; onChanged(); return this; } private com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription config_; private com.google.protobuf.SingleFieldBuilderV3< com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.Builder, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescriptionOrBuilder> configBuilder_; /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> * @return Whether the config field is set. */ public boolean hasConfig() { return configBuilder_ != null || config_ != null; } /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> * @return The config. */ public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription getConfig() { if (configBuilder_ == null) { return config_ == null ? com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.getDefaultInstance() : config_; } else { return configBuilder_.getMessage(); } } /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ public Builder setConfig(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription value) { if (configBuilder_ == null) { if (value == null) { throw new NullPointerException(); } config_ = value; onChanged(); } else { configBuilder_.setMessage(value); } return this; } /** * <pre> * The configuration for which the resource is defined. 
* </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ public Builder setConfig( com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.Builder builderForValue) { if (configBuilder_ == null) { config_ = builderForValue.build(); onChanged(); } else { configBuilder_.setMessage(builderForValue.build()); } return this; } /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ public Builder mergeConfig(com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription value) { if (configBuilder_ == null) { if (config_ != null) { config_ = com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.newBuilder(config_).mergeFrom(value).buildPartial(); } else { config_ = value; } onChanged(); } else { configBuilder_.mergeFrom(value); } return this; } /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ public Builder clearConfig() { if (configBuilder_ == null) { config_ = null; onChanged(); } else { config_ = null; configBuilder_ = null; } return this; } /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.Builder getConfigBuilder() { onChanged(); return getConfigFieldBuilder().getBuilder(); } /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescriptionOrBuilder getConfigOrBuilder() { if (configBuilder_ != null) { return configBuilder_.getMessageOrBuilder(); } else { return config_ == null ? 
com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.getDefaultInstance() : config_; } } /** * <pre> * The configuration for which the resource is defined. * </pre> * * <code>.aapt.pb.internal.ConfigDescription config = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.Builder, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescriptionOrBuilder> getConfigFieldBuilder() { if (configBuilder_ == null) { configBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescription.Builder, com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.ConfigDescriptionOrBuilder>( getConfig(), getParentForChildren(), isClean()); config_ = null; } return configBuilder_; } private java.lang.Object sourcePath_ = ""; /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @return The sourcePath. */ public java.lang.String getSourcePath() { java.lang.Object ref = sourcePath_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourcePath_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @return The bytes for sourcePath. 
*/ public com.google.protobuf.ByteString getSourcePathBytes() { java.lang.Object ref = sourcePath_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); sourcePath_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @param value The sourcePath to set. * @return This builder for chaining. */ public Builder setSourcePath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } sourcePath_ = value; onChanged(); return this; } /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @return This builder for chaining. */ public Builder clearSourcePath() { sourcePath_ = getDefaultInstance().getSourcePath(); onChanged(); return this; } /** * <pre> * The filesystem path to where the source file originated. * Mainly used to display helpful error messages. * </pre> * * <code>string source_path = 3;</code> * @param value The bytes for sourcePath to set. * @return This builder for chaining. 
*/ public Builder setSourcePathBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); sourcePath_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:aapt.pb.internal.CompiledFileLegacy) } // @@protoc_insertion_point(class_scope:aapt.pb.internal.CompiledFileLegacy) private static final com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy(); } public static com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CompiledFileLegacy> PARSER = new com.google.protobuf.AbstractParser<CompiledFileLegacy>() { @java.lang.Override public CompiledFileLegacy parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CompiledFileLegacy(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CompiledFileLegacy> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CompiledFileLegacy> getParserForType() { return PARSER; } @java.lang.Override public com.didiglobal.booster.aapt2.legacy.ResourcesInternalLegacy.CompiledFileLegacy getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final com.google.protobuf.Descriptors.Descriptor 
internal_static_aapt_pb_internal_ConfigDescription_descriptor; private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_aapt_pb_internal_ConfigDescription_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_aapt_pb_internal_CompiledFileLegacy_descriptor; private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_aapt_pb_internal_CompiledFileLegacy_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_descriptor; private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\035ResourcesInternalLegacy.proto\022\020aapt.pb" + ".internal\032\017Resources.proto\"2\n\021ConfigDesc" + "ription\022\014\n\004data\030\001 \001(\014\022\017\n\007product\030\002 \001(\t\"\277" + "\001\n\022CompiledFileLegacy\022\025\n\rresource_name\030\001" + " \001(\t\0223\n\006config\030\002 \001(\0132#.aapt.pb.internal." + "ConfigDescription\022\023\n\013source_path\030\003 \001(\t\032H" + "\n\006Symbol\022\025\n\rresource_name\030\001 \001(\t\022\'\n\006sourc" + "e\030\002 \001(\0132\027.aapt.pb.SourcePositionB\'\n#com." 
+ "didiglobal.booster.aapt2.legacyH\003b\006proto" + "3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.didiglobal.booster.aapt2.Resources.getDescriptor(), }); internal_static_aapt_pb_internal_ConfigDescription_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_aapt_pb_internal_ConfigDescription_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_aapt_pb_internal_ConfigDescription_descriptor, new java.lang.String[] { "Data", "Product", }); internal_static_aapt_pb_internal_CompiledFileLegacy_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_aapt_pb_internal_CompiledFileLegacy_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_aapt_pb_internal_CompiledFileLegacy_descriptor, new java.lang.String[] { "ResourceName", "Config", "SourcePath", }); internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_descriptor = internal_static_aapt_pb_internal_CompiledFileLegacy_descriptor.getNestedTypes().get(0); internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_aapt_pb_internal_CompiledFileLegacy_Symbol_descriptor, new java.lang.String[] { "ResourceName", "Source", }); com.didiglobal.booster.aapt2.Resources.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
42,562
302
# -*- coding: utf-8 -*- # Copyright 2016 Yelp Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import import pytest from mock import sentinel from kafka_utils.kafka_cluster_manager.cluster_info.error \ import InvalidPartitionMeasurementError from kafka_utils.kafka_cluster_manager.cluster_info.partition import Partition class TestPartition(object): @pytest.fixture def partition(self): mock_topic = sentinel.t1 mock_topic.id = 't1' return Partition( mock_topic, 0, [sentinel.r1, sentinel.r2], 2, 3, ) def test_name(self, partition): assert partition.name == ('t1', 0) def test_topic(self, partition): assert partition.topic == sentinel.t1 def test_replicas(self, partition): assert partition.replicas == [sentinel.r1, sentinel.r2] def test_leader(self, partition): assert partition.leader == sentinel.r1 def test_weight(self, partition): assert partition.weight == 2 def test_weight_negative(self): mock_topic = sentinel.t1 mock_topic.id = 't1' with pytest.raises(InvalidPartitionMeasurementError): Partition(mock_topic, 0, [sentinel.r1, sentinel.r2], -1, 1) def test_size(self, partition): assert partition.size == 3 def test_size_negative(self): mock_topic = sentinel.t1 mock_topic.id = 't1' with pytest.raises(InvalidPartitionMeasurementError): Partition(mock_topic, 0, [sentinel.r1, sentinel.r2], 1, -1) def test_replication_factor(self, partition): assert partition.replication_factor == 2 def test_partition_id(self, partition): assert partition.partition_id == 
0 def test_add_replica(self, partition): new_broker = sentinel.new_r partition.add_replica(new_broker) assert partition.replicas == [sentinel.r1, sentinel.r2, sentinel.new_r] def test_swap_leader(self, partition): b = sentinel.r2 old_replicas = partition.replicas partition.swap_leader(b) # Verify leader changed to b assert partition.leader == b # Verify that replica set remains same assert sorted(old_replicas, key=id) == sorted(partition.replicas, key=id) def test_followers_1(self, partition): # Case:1 With followers assert partition.followers == [sentinel.r2] def test_followers_2(self): # Case:2 No-followers mock_topic = sentinel.t1 mock_topic.id = 't1' p2 = Partition(mock_topic, 0, [sentinel.r1]) assert p2.followers == [] def test_count_siblings(self): t1, t1.id = sentinel.t1, 't1' t2, t2.id = sentinel.t2, 't2' p1, p3, p4 = Partition(t1, 0), Partition(t1, 1), Partition(t2, 0), # verify sibling count p_group = [p1, p4, p3] assert p3.count_siblings(p_group) == 2 assert p4.count_siblings(p_group) == 1 p_group = [p4] assert p1.count_siblings(p_group) == 0 # Empty group p_group = [] assert p1.count_siblings(p_group) == 0 def test_replace(self, partition): curr_broker = partition.replicas[0] partition.replace(curr_broker, sentinel.new_broker) assert partition.replicas[0] == sentinel.new_broker
1,635
2,921
<reponame>otopetrik/nanopb<filename>tests/msgid/encode_msgid.c<gh_stars>1000+ /* Encode a message using msgid field as prefix */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <pb_encode.h> #include "msgid_example.pb.h" #include "test_helpers.h" /* This function writes the message id as a prefix to the message, allowing * the receiving side to identify message type. Here we use uint8_t to store * it, but e.g. varint or some custom header struct would work just as well. */ bool write_prefix(pb_ostream_t *stream, int msgid) { uint8_t prefix = msgid; return pb_write(stream, &prefix, 1); } /* The main logic will call one of these functions. * Normally which function you call would be selected based on what message * you want to send, here it is decided based on command line parameter. */ bool encode_MyMessage1(pb_ostream_t *stream) { MyMessage1 msg = MyMessage1_init_default; msg.intvalue = 1234; return write_prefix(stream, MyMessage1_msgid) && pb_encode(stream, MyMessage1_fields, &msg); } bool encode_MyMessage2(pb_ostream_t *stream) { MyMessage2 msg = MyMessage2_init_default; msg.intvalue = 9999; strcpy(msg.strvalue, "Msg2"); return write_prefix(stream, MyMessage2_msgid) && pb_encode(stream, MyMessage2_fields, &msg); } bool encode_MyMessage3(pb_ostream_t *stream) { MyMessage3 msg = MyMessage3_init_default; msg.boolvalue = true; return write_prefix(stream, MyMessage3_msgid) && pb_encode(stream, MyMessage3_fields, &msg); } int main(int argc, char **argv) { uint8_t buffer[128]; pb_ostream_t stream; bool status = false; int option; if (argc != 2) { fprintf(stderr, "Usage: encode_msgid [number]\n"); return 1; } option = atoi(argv[1]); stream = pb_ostream_from_buffer(buffer, sizeof(buffer)); if (option == 1) { status = encode_MyMessage1(&stream); } else if (option == 2) { status = encode_MyMessage2(&stream); } else if (option == 3) { status = encode_MyMessage3(&stream); } if (status) { SET_BINARY_MODE(stdout); fwrite(buffer, 1, stream.bytes_written, 
stdout); return 0; } else { fprintf(stderr, "Encoding failed: %s\n", PB_GET_ERROR(&stream)); return 1; } }
982
3,428
{"id":"02211","group":"easy-ham-1","checksum":{"type":"MD5","value":"06ce65e84569f2a40763e9c9ac2ee6cc"},"text":"From <EMAIL> Thu Oct 3 12:24:35 2002\nReturn-Path: <<EMAIL>>\nDelivered-To: y<EMAIL>int.org\nReceived: from localhost (jalapeno [127.0.0.1])\n\tby jmason.org (Postfix) with ESMTP id 8D85816F6B\n\tfor <jm@localhost>; Thu, 3 Oct 2002 12:23:34 +0100 (IST)\nReceived: from jalapeno [127.0.0.1]\n\tby localhost with IMAP (fetchmail-5.9.0)\n\tfor jm@localhost (single-drop); Thu, 03 Oct 2002 12:23:34 +0100 (IST)\nReceived: from dogma.slashnull.org (localhost [127.0.0.1]) by\n dogma.slashnull.org (8.11.6/8.11.6) with ESMTP id g9380KK19938 for\n <<EMAIL>>; Thu, 3 Oct 2002 09:00:26 +0100\nMessage-Id: <<EMAIL>>\nTo: yyyy<EMAIL>int.org\nFrom: diveintomark <<EMAIL>>\nSubject: Insanely complicated\nDate: Thu, 03 Oct 2002 08:00:20 -0000\nContent-Type: text/plain; encoding=utf-8\n\nURL: http://diveintomark.org/archives/2002/10/02.html#insanely_complicated\nDate: 2002-10-02T09:34:51-05:00\n\n_<NAME>_: My faceless enemy has been defeated[1]. &#8220;My arch nemesis \nover the last couple of weeks has been the very poorly documented Hebrew \nTraditional Numbering System.&#8221;\n\n\n\n[1] http://ln.hixie.ch/?start=1033524738&count=1\n\n\n"}
532
8,027
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.jvm.java.lang.model; import javax.lang.model.element.ExecutableElement; /** * A bridge method is used in Java to translate certain language constructs (like overriding methods * with covariant return types) into a form that can be understood by the VM. */ public class BridgeMethod { /** * For a normal override bridge, this is the overriding method. For an accessibility bridge, this * is the same as {@link #to}. */ public final ExecutableElement from; /** * For a normal override bridge, this is the method being overridden. For an accessibility bridge, * this is a public method in a non-public superclass of the current class. */ public final ExecutableElement to; public BridgeMethod(ExecutableElement from, ExecutableElement to) { this.from = from; this.to = to; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } BridgeMethod that = (BridgeMethod) o; if (!from.equals(that.from)) { return false; } return to.equals(that.to); } @Override public int hashCode() { int result = from.hashCode(); result = 31 * result + to.hashCode(); return result; } @Override public String toString() { return "BridgeMethod{" + "from=" + from.getEnclosingElement() + "." + from + ", to=" + to.getEnclosingElement() + "." 
+ to + '}'; } /** * Bridge methods may be generated for public methods of non-public superclasses to (as the * compiler source puts it) "work around a horrible but permanent reflection design error." * * @return true if this is such a bridge method */ public boolean isAccessibilityBridge() { return from == to; } /** * Bridge methods may be generated when an overriding method has a different erasure than the * overridden method, or a different return type. (The VM only sees erased types, and at the VM * level overloading on return type is legal, so these bridge methods are necessary to implement * overriding as the Java language specifies it.) * * @return true if this is such a bridge method */ public boolean isOverrideBridge() { return from != to; } }
930
461
from habu.lib import dnsx def test_mx(): assert 'aspmx.l.google.com.' in dnsx.mx('google.com') def test_ns(): assert 'ns1.google.com.' in dnsx.ns('google.com') def test_axfr_fail(): assert not dnsx.axfr('google.com') def test_axfr_success(): assert dnsx.axfr('zonetransfer.me')
131
348
<reponame>chamberone/Leaflet.PixiOverlay<filename>docs/data/leg-t2/088/08804336.json {"nom":"Ollainville","circ":"4ème circonscription","dpt":"Vosges","inscrits":63,"abs":27,"votants":36,"blancs":1,"nuls":1,"exp":34,"res":[{"nuance":"LR","nom":"<NAME>","voix":20},{"nuance":"SOC","nom":"<NAME>","voix":14}]}
129
2,042
<filename>Demo/src/main/java/com/yc/ycvideoplayer/newPlayer/tiktok/TikTokRenderViewFactory.java package com.yc.ycvideoplayer.newPlayer.tiktok; import android.content.Context; import com.yc.video.surface.InterSurfaceView; import com.yc.video.surface.SurfaceFactory; import com.yc.video.surface.RenderTextureView; public class TikTokRenderViewFactory extends SurfaceFactory { public static TikTokRenderViewFactory create() { return new TikTokRenderViewFactory(); } @Override public InterSurfaceView createRenderView(Context context) { return new TikTokRenderView(new RenderTextureView(context)); } }
217
578
<filename>src/core/XML/XMLFileHandlers.h
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>.
* All rights reserved. Use of the code is allowed under the
* Artistic License 2.0 terms, as specified in the LICENSE file
* distributed with this code, or available from
* http://www.opensource.org/licenses/artistic-license-2.0.php
*/
#ifndef __XMLFILEHANDLERS_H
#define __XMLFILEHANDLERS_H

// Common handler state shared by the MSXML / Xerces / pugixml XML import
// back-ends: accumulates parsed entries, import statistics and error lists.

// PWS includes
#include "../ItemData.h"
#include "../UnknownField.h"
#include "../Command.h"
#include "../Report.h"

#include "os/UUID.h"

#include "XMLDefs.h" // Required if testing "USE_XML_LIBRARY"

// Entry types
enum {NORMAL = 0, ALIAS, SHORTCUT};

// New imported entry: one field per XML element; values are kept as raw
// strings here and converted/validated when the entry is added to the core.
struct pw_entry {
  int id;
  StringX group;
  StringX title;
  StringX username;
  StringX password;
  StringX url;
  StringX autotype;
  StringX ctime;
  StringX atime;
  StringX xtime;
  StringX xtime_interval;
  StringX pmtime;
  StringX rmtime;
  StringX pwhistory;
  StringX notes;
  StringX uuid;
  StringX run_command;
  StringX dca;
  StringX shiftdca;
  StringX email;
  StringX symbols;
  StringX policyname;
  StringX kbshortcut;
  unsigned char ucprotected;
  PWPolicy pwp;
  int entrytype;       // NORMAL / ALIAS / SHORTCUT
  bool bforce_normal_entry;
};

// One <history_entry> element of an entry's password history.
struct pwhistory_entry {
  StringX changed;
  StringX oldpassword;
};

typedef std::vector<pw_entry *> vdb_entries;

class PWScore;

class XMLFileHandlers
{
  // to allow access to protected members
#if   USE_XML_LIBRARY == MSXML
  friend class MFileXMLProcessor;
#elif USE_XML_LIBRARY == XERCES
  friend class XFileXMLProcessor;
#else
  friend class PFileXMLProcessor;
#endif

public:
  XMLFileHandlers();
  virtual ~XMLFileHandlers();

  // Wire in the core, import options and output collections before parsing.
  void SetVariables(PWScore *pcore, const bool &bValidation,
                    const stringT &ImportedPrefix, const TCHAR &delimiter,
                    const bool &bImportPSWDsOnly,
                    UUIDVector *pPossible_Aliases, UUIDVector *pPossible_Shortcuts,
                    MultiCommands *pmulticmds, CReport *prpt);

  // Error/result accessors, valid after the parse has completed.
  bool getIfErrors() const {return m_bErrors;}
  int getErrorCode() const {return m_iErrorCode;}
  stringT getErrorMessage() const {return m_strErrorMessage;}
  stringT getXMLErrors() const {return m_strXMLErrors;}
  stringT getSkippedList() const {return m_strSkippedList;}
  stringT getPWHErrorList() const {return m_strPWHErrorList;}
  stringT getRenameList() const {return m_strRenameList;}

  vdb_entries & getVDB_Entries() {return m_ventries;}
  TCHAR getDelimiter() const {return m_delimiter;}

  // Import statistics.
  int getNumEntries() const {return m_numEntries;}
  int getNumSkipped() const {return m_numEntriesSkipped;}
  int getNumRenamed() const {return m_numEntriesRenamed;}
  int getNumPWHErrors() const {return m_numEntriesPWHErrors;}
  int getNumNoPolicies() const {return m_numNoPolicies;}
  int getNumRenamedPolicies() const {return m_numRenamedPolicies;}
  int getNumShortcutsRemoved() const {return m_numShortcutsRemoved;}
  int getNumEmptyGroupsImported() const {return m_numEmptyGroupsImported;}

  bool getDatabaseHeaderErrors() const {return m_bDatabaseHeaderErrors;}
  bool getRecordHeaderErrors() const {return m_bRecordHeaderErrors;}

protected:
  // Called by the back-end-specific processors for each element boundary.
  bool ProcessStartElement(const int icurrent_element);
  void ProcessEndElement(const int icurrent_element);
  // Commit accumulated entries / preferences to the core once parsing is done.
  void AddXMLEntries();
  void AddDBPreferences();

  PWPolicy currentDB_default_pwp, importDB_default_pwp;
  vdb_entries m_ventries;          // all entries parsed so far
  pw_entry *m_cur_entry;           // entry currently being filled in
  StringX m_sxElemContent;         // character data of the current element

  stringT m_strErrorMessage;
  stringT m_strXMLErrors;
  stringT m_strPWHErrorList;
  stringT m_strRenameList;
  stringT m_strSkippedList;
  int m_numEntries;
  int m_numEntriesSkipped;
  int m_numEntriesRenamed;
  int m_numEntriesPWHErrors;
  int m_numNoPolicies;
  int m_numRenamedPolicies;
  int m_numShortcutsRemoved;
  int m_numEmptyGroupsImported;
  int m_iErrorCode;
  TCHAR m_delimiter;

  // Parser state flags.
  bool m_bEntryBeingProcessed;
  bool m_bPolicyBeingProcessed;
  bool m_bValidation;              // true = validate only, don't import
  bool m_bInPolicyNames, m_bInEmptyGroups;
  bool m_bErrors, m_bRecordHeaderErrors, m_bDatabaseHeaderErrors;
  bool m_bImportPSWDsOnly;

  unsigned char m_ctype;
  UnknownFieldList m_ukhxl;  // For header unknown fields

private:
  // Local variables
  PWScore *m_pXMLcore;
  UUIDVector *m_pPossible_Aliases;
  UUIDVector *m_pPossible_Shortcuts;
  MultiCommands *m_pmulticmds;
  CReport *m_prpt;
  int m_ipwh;
  int m_fieldlen;
  bool m_bheader;
  unsigned char *m_pfield;

  // Preferences possibly stored in database
  stringT m_ImportedPrefix;
  StringX m_PolicyName;
  PWPolicy m_Named_pwp;
  PSWDPolicyMap m_MapPSWDPLC;
  std::map<StringX, StringX> m_mapRenamedPolicies;
  std::vector<StringX> m_vEmptyGroups;
  StringX m_sxXML_DateTime;
  pwhistory_entry *m_cur_pwhistory_entry;
};

#endif /* __XMLFILEHANDLERS_H */
1,787
502
"""
This is a skeleton script that shows how you can build a Kubeflow pipeline
using the Kale SDK.
"""

"""
The only imports you need to convert your python code to a pipeline are these
two decorators.
"""
from kale.sdk import pipeline, step


"""
Defining a step is as simple as decorating a Python function. When a function
is decorated with `@step`, Kale will generate a KFP pipeline step executing
the decorated function. Just make sure to `import` all your modules *inside*
the function definition, as the code that will run in the Kubeflow pipeline
won't have the entire context of the current script. If you are using Rok to
take snapshots (see below on how this works with the Kale SDK) and reproduce
the current environment in the pipeline step, you can use relative imports to
reference local scripts.
"""


@step(name="my_step")
def foo(a):
    # Using a relative import to another local script will work as long as
    # you are using rok to snapshot the current environment and mount a clone
    # of the volume in the pipeline step:
    # from .script import bar
    import sys
    sys.stdout.write(a)
    # return multiple values. These could be used by different subsequent
    # pipeline steps.
    return "Some", "Data"


@step(name="second_step")
def foo2(b, c):
    # Consumes the first element of foo's returned tuple plus the pipeline
    # parameter.
    print(b + c)


@step(name="third_step")
def foo3(b, c):
    # Consumes the second element of foo's returned tuple plus the pipeline
    # parameter.
    print(b + c)


"""
You are not restricted to defining all your functions in a single source
file. Organize your "step" functions as you like in other local scripts and
import then. Treat your functions just like any other Python project, Kale
just needs to access the function *objects*, not their original source code.
E.g.:

```
# import the `@step` decorated function `processing_step`, from file
# `data_processing.py`
from .data_processing import processing_step
```
"""

"""
Define the pipeline: Once you have all your steps (i.e. functions) defined,
all you need to do to define and create the pipeline is to call all of these
functions from a single "entry-point", just like you would normally do for
running your code locally. Use the `pipeline` decorator to tell Kale that
this is the function defining the pipeline structure. Decide a pipeline name
and an experiment. In Kubeflow Pipelines, experiments are containers of runs.
Ideally you should create a new experiment for every new project.

Note that until now you have been writing *plain Python*. All the step
functions can be written as any other Python function, with no restrictions,
and you can even import them from other local files. The `@pipeline`
decorated function poses some syntax restrictions, as Kale needs to parse it
to create a corresponding pipeline representation. Whenever these
restrictions are not met, Kale will try to fail gracefully and inform you how
you should fix it. These are the notable constraints:

- You can add input arguments to define *pipeline parameters*. All input
  arguments expect a default value.
- The body of the function does not accept arbitrary Python statements. All
  you can write is function calls, chaining the together with their return
  arguments.
- Each line should contain a function call with its return value.
- Use tuple unpacking to return multiple values
"""


@pipeline(name="my-beautiful-pipeline", experiment="learning-the-kale-sdk")
def my_pipeline(parameter="input"):
    # Tuple unpacking: data1 and data2 feed two independent downstream steps.
    data1, data2 = foo(parameter)
    foo2(data1, parameter)
    foo3(data2, parameter)


"""
Add a script entry-point to call the function from CLI. You can override the
default pipeline parameters when calling the pipeline, just remember that
only keyword argument are accepted when calling a `@pipeline` decorated
function.

Once you write the entry-point, you can either run the pipeline locally, or
compile and run the pipeline in Kubeflow.

## Local run:

```
python3 skeleton.py
```

That's it. Running the script itself will invoke the `@pipeline` decorated
functions. At this point Kale will validate your code and make sure that it
can be converted into a pipeline. Then, Kale will start a local execution, so
that you can uncover bugs early, before actually submitting the run to
Kubeflow. This is a great way to quickly debug what is going on in your code
and speed up the development process.

## Compile and run in Kubeflow

Compiling the pipeline and running it in Kubeflow Pipelines is extremely
easy:

```
python3 skeleton.py --kfp
```

When running the above command, the following things will happen:

- Kale validates the current code, to make sure that it can be converted to a
  pipeline
- Rok takes a snapshot of your mounted volumes
- Kale creates a new KFP pipelines, using the same docker image as your
  current environment as base image for the step and seeding clones of your
  volumes.
- Kale creates (if necessary) a new KFP experiment, based on the provided
  name
- Kale uploads a new pipeline definition
- Kale starts a new pipeline run
"""

if __name__ == "__main__":
    my_pipeline(parameter="test")
1,326
1,346
<gh_stars>1000+
package com.huanchengfly.tieba.post.widgets.theme;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.util.AttributeSet;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.widget.AppCompatSeekBar;

import com.huanchengfly.tieba.post.R;
import com.huanchengfly.tieba.post.ui.theme.interfaces.Tintable;
import com.huanchengfly.tieba.post.ui.theme.utils.ColorStateListUtils;
import com.huanchengfly.tieba.post.ui.theme.utils.ThemeUtils;

/**
 * A themable {@link AppCompatSeekBar}: reads tint color resource ids from the
 * XML attributes and re-applies them whenever the theme asks it to re-tint.
 */
@SuppressLint("CustomViewStyleable")
public class TintSeekBar extends AppCompatSeekBar implements Tintable {
    // Color resource ids resolved through ThemeUtils; 0 means "not set".
    private int mBackgroundTintResId;
    private int mProgressTintResId;
    private int mProgressBackgroundTintResId;
    private int mThumbColorResId;

    public TintSeekBar(@NonNull Context context) {
        this(context, null);
    }

    public TintSeekBar(@NonNull Context context, @Nullable AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public TintSeekBar(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        // Layout-editor preview: skip theme resolution entirely.
        if (isInEditMode()) {
            return;
        }
        if (attrs == null) {
            // No attributes supplied programmatically: clear everything and
            // apply (no-op tinting, since all ids are 0).
            mBackgroundTintResId = 0;
            mProgressTintResId = 0;
            mProgressBackgroundTintResId = 0;
            mThumbColorResId = 0;
            applyTintColor();
            return;
        }
        TypedArray typedArray = getContext().obtainStyledAttributes(attrs, R.styleable.TintSeekbar, defStyleAttr, 0);
        mBackgroundTintResId = typedArray.getResourceId(R.styleable.TintSeekbar_seekbarBackgroundTint, 0);
        mProgressTintResId = typedArray.getResourceId(R.styleable.TintSeekbar_progressTint, 0);
        mProgressBackgroundTintResId = typedArray.getResourceId(R.styleable.TintSeekbar_progressBackgroundTint, 0);
        mThumbColorResId = typedArray.getResourceId(R.styleable.TintSeekbar_thumbColor, 0);
        typedArray.recycle();
        applyTintColor();
    }

    @Override
    public void tint() {
        applyTintColor();
    }

    /** Resolves each configured resource id to a color and applies it. */
    private void applyTintColor() {
        if (mBackgroundTintResId != 0) {
            if (getBackground() == null) {
                // No background drawable to tint: fall back to a solid color.
                setBackgroundColor(ThemeUtils.getColorById(getContext(), mBackgroundTintResId));
            } else {
                setBackgroundTintList(ColorStateList.valueOf(ThemeUtils.getColorById(getContext(), mBackgroundTintResId)));
            }
        }
        if (mProgressTintResId != 0) {
            setProgressTintList(ColorStateList.valueOf(ThemeUtils.getColorById(getContext(), mProgressTintResId)));
        }
        if (mProgressBackgroundTintResId != 0) {
            setProgressBackgroundTintList(ColorStateList.valueOf(ThemeUtils.getColorById(getContext(), mProgressBackgroundTintResId)));
        }
        if (mThumbColorResId != 0) {
            setThumbTintList(ColorStateListUtils.createColorStateList(getContext(), mThumbColorResId));
        }
    }
}
1,271
4,955
import requests headers = { 'Content-Type': 'application/json', 'X-API-Version': '200', } data = '{"userName":"username123","password":"<PASSWORD>", "authLoginDomain":"local"}' response = requests.post('https://0.0.0.0/rest/login-sessions', headers=headers, data=data, verify=False)
103
965
// Set the edit control to be read-only. m_myEdit.SetReadOnly(TRUE); ASSERT(m_myEdit.GetStyle() & ES_READONLY);
43
369
<gh_stars>100-1000 // Copyright (c) 2017-2022, Mudita <NAME>.o.o. All rights reserved. // For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md #include "ContactsRingtonesTable.hpp" ContactsRingtonesTable::ContactsRingtonesTable(Database *db) : Table(db) {} ContactsRingtonesTable::~ContactsRingtonesTable() {} bool ContactsRingtonesTable::create() { return true; } bool ContactsRingtonesTable::add(ContactsRingtonesTableRow entry) { return db->execute("insert or ignore into contact_ringtones (contact_id, asset_path ) VALUES (%lu, '%q');", entry.contactID, entry.assetPath.c_str()); } bool ContactsRingtonesTable::removeById(uint32_t id) { return db->execute("DELETE FROM contact_ringtones where _id = %u;", id); } bool ContactsRingtonesTable::update(ContactsRingtonesTableRow entry) { return db->execute("UPDATE contact_ringtones SET contact_id = %lu, asset_path = '%q' WHERE _id=%lu;", entry.contactID, entry.assetPath.c_str(), entry.ID); } ContactsRingtonesTableRow ContactsRingtonesTable::getById(uint32_t id) { auto retQuery = db->query("SELECT * FROM contact_ringtones WHERE _id= %lu;", id); if ((retQuery == nullptr) || (retQuery->getRowCount() == 0)) { return ContactsRingtonesTableRow(); } return ContactsRingtonesTableRow{ (*retQuery)[0].getUInt32(), // ID (*retQuery)[1].getUInt32(), // contactID (*retQuery)[2].getString() // assetPath }; } std::vector<ContactsRingtonesTableRow> ContactsRingtonesTable::getLimitOffset(uint32_t offset, uint32_t limit) { auto retQuery = db->query("SELECT * from contact_ringtones ORDER BY contact_id LIMIT %lu OFFSET %lu;", limit, offset); if ((retQuery == nullptr) || (retQuery->getRowCount() == 0)) { return std::vector<ContactsRingtonesTableRow>(); } std::vector<ContactsRingtonesTableRow> ret; do { ret.push_back(ContactsRingtonesTableRow{ (*retQuery)[0].getUInt32(), // ID (*retQuery)[1].getUInt32(), // contactID (*retQuery)[2].getString() // assetPath }); } while (retQuery->nextRow()); return ret; } 
std::vector<ContactsRingtonesTableRow> ContactsRingtonesTable::getLimitOffsetByField(uint32_t offset, uint32_t limit, ContactRingtonesTableFields field, const char *str) { std::string fieldName; switch (field) { case ContactRingtonesTableFields::AssetPath: fieldName = "asset_path"; break; default: return std::vector<ContactsRingtonesTableRow>(); } auto retQuery = db->query("SELECT * from contact_ringtones WHERE %q='%q' ORDER BY contact_id LIMIT %lu OFFSET %lu;", fieldName.c_str(), str, limit, offset); if ((retQuery == nullptr) || (retQuery->getRowCount() == 0)) { return std::vector<ContactsRingtonesTableRow>(); } std::vector<ContactsRingtonesTableRow> ret; do { ret.push_back(ContactsRingtonesTableRow{ (*retQuery)[0].getUInt32(), // ID (*retQuery)[1].getUInt32(), // contactID (*retQuery)[2].getString() // assetPath }); } while (retQuery->nextRow()); return ret; } uint32_t ContactsRingtonesTable::count() { auto queryRet = db->query("SELECT COUNT(*) FROM contact_ringtones;"); if (!queryRet || queryRet->getRowCount() == 0) { return 0; } return uint32_t{(*queryRet)[0].getUInt32()}; } uint32_t ContactsRingtonesTable::countByFieldId(const char *field, uint32_t id) { auto queryRet = db->query("SELECT COUNT(*) FROM contact_ringtones WHERE %q=%lu;", field, id); if ((queryRet == nullptr) || (queryRet->getRowCount() == 0)) { return 0; } return uint32_t{(*queryRet)[0].getUInt32()}; }
1,961
1,515
<reponame>CodeGuy-007/lightly
"""Unit tests for lightly's classification active-learning scorer."""
import unittest

import numpy as np

from lightly.active_learning.scorers.classification import ScorerClassification, _entropy


class TestScorerClassification(unittest.TestCase):

    def test_score_calculation_random(self):
        # Random (row-normalized) probability matrix: every score must be a
        # numpy array of shape (n_samples,) with values in [0, 1].
        n_samples = 10000
        n_classes = 10
        np.random.seed(42)
        predictions = np.random.rand(n_samples, n_classes)
        predictions_normalized = predictions / np.sum(predictions, axis=1)[:, np.newaxis]
        model_output = predictions_normalized
        scorer = ScorerClassification(model_output)
        scores = scorer.calculate_scores()

        self.assertEqual(set(scores.keys()), set(ScorerClassification.score_names()))

        for score_name, score in scores.items():
            self.assertEqual(score.shape, (n_samples,))
            self.assertTrue(all(score >= 0))
            self.assertTrue(all(score <= 1))
            self.assertEqual(type(score), np.ndarray)

    def test_score_calculation_specific(self):
        # Hand-computed expectations for a 2x3 probability matrix.
        model_output = [
            [0.7, 0.2, 0.1],
            [0.4, 0.5, 0.1]
        ]
        model_output = np.array(model_output)
        scorer = ScorerClassification(model_output)
        scores = scorer.calculate_scores()

        # Least-confidence is normalized by (1 - 1/n_classes).
        self.assertListEqual(list(scores["uncertainty_least_confidence"]),
                             [(1 - 0.7) / (1 - 1. / 3.), (1 - 0.5) / (1 - 1. / 3.)])
        # Margin = 1 - (top1 - top2).
        self.assertListEqual(list(scores["uncertainty_margin"]), [1 - (0.7 - 0.2), 1 - (0.5 - 0.4)])
        # Entropy is normalized by log2(n_classes).
        for val1, val2 in zip(scores["uncertainty_entropy"], _entropy(model_output) / np.log2(3)):
            self.assertAlmostEqual(val1, val2, places=8)

    def test_score_calculation_binary(self):
        # A single-column binary output [p] is interpreted as [p, 1 - p]:
        # the second sample's 0.4 therefore has max class probability 0.6,
        # which is where the 0.6 constants below come from.
        model_output = [
            [0.7],
            [0.4]
        ]
        model_output = np.array(model_output)
        scorer = ScorerClassification(model_output)
        scores = scorer.calculate_scores()

        self.assertListEqual(list(scores["uncertainty_least_confidence"]),
                             [(1 - 0.7) / (1 - 1. / 2.), (1 - 0.6) / (1 - 1. / 2.)])
        self.assertListEqual(list(scores["uncertainty_margin"]), [1 - (0.7 - 0.3), 1 - (0.6 - 0.4)])
        # Expand to the explicit two-column form before computing the entropy
        # reference values.
        model_output = np.concatenate([model_output, 1-model_output], axis=1)
        for val1, val2 in zip(scores["uncertainty_entropy"], _entropy(model_output) / np.log2(2)):
            self.assertAlmostEqual(val1, val2, places=8)

    def test_scorer_classification_empty_model_output(self):
        scorer = ScorerClassification(model_output=[])
        scores = scorer.calculate_scores()
        self.assertEqual(set(scores.keys()), set(ScorerClassification.score_names()))

    def test_scorer_classification_variable_model_output_dimension(self):
        # Sweep every (n_samples, n_classes) combination in [0, 4]^2; only a
        # nonzero sample count with zero classes is expected to raise.
        for num_samples in range(5):
            for num_classes in range(5):
                with self.subTest(msg=f"model_output.shape = ({num_samples},{num_classes})"):

                    if num_samples > 0:
                        # NOTE(review): 1./num_samples does not make the rows
                        # sum to one (1./num_classes would) — presumably the
                        # scorer does not require normalized rows; confirm.
                        preds = [1. / num_samples] * num_classes
                    else:
                        preds = []
                    model_output = [preds] * num_samples

                    if num_classes == 0 and num_samples > 0:
                        with self.assertRaises(ValueError):
                            scorer = ScorerClassification(model_output=model_output)
                    else:
                        scorer = ScorerClassification(model_output=model_output)
                        scores = scorer.calculate_scores()
                        self.assertEqual(set(scores.keys()), set(ScorerClassification.score_names()))
                        for score_values in scores.values():
                            self.assertEqual(len(score_values), len(model_output))
                            self.assertEqual(type(score_values), np.ndarray)

    def test_scorer_classification_variable_model_output_tensor_order(self):
        # Only rank-2 tensors are valid model output; any other rank raises.
        # NOTE(review): the `tensor_order == 0` branch is unreachable since
        # the loop starts at 1 — dead condition kept as-is.
        for tensor_order in range(1, 5):
            model_output = np.ndarray((3,) * tensor_order)
            with self.subTest(msg=f"model_output.shape = {model_output.shape}"):

                if tensor_order == 2 or tensor_order == 0:
                    scorer = ScorerClassification(model_output=model_output)
                    scores = scorer.calculate_scores()
                    for score_values in scores.values():
                        self.assertEqual(type(score_values), np.ndarray)
                else:
                    with self.assertRaises(ValueError):
                        scorer = ScorerClassification(model_output=model_output)
2,233
3,215
[{"filename": "AFP.conf", "type": "data"}, {"filename": "afpovertcp.cfg", "type": "ASCII text"}, {"filename": "aliases", "type": "ASCII text"}, {"filename": "aliases.db", "type": "Berkeley DB 1.85 (Hash, version 2, native byte-order)"}, {"filename": "apache2", "type": "directory"}, {"filename": "asl", "type": "directory"}, {"filename": "asl.conf", "type": "ASCII text"}, {"filename": "auto_home", "type": "ASCII text"}, {"filename": "auto_master", "type": "ASCII text"}, {"filename": "autofs.conf", "type": "ASCII text"}, {"filename": "bashrc", "type": "ASCII text"}, {"filename": "bashrc_Apple_Terminal", "type": "ASCII text"}, {"filename": "com.apple.screensharing.agent.launchd", "type": "ASCII text, with no line terminators"}, {"filename": "csh.cshrc", "type": "ASCII text"}, {"filename": "csh.login", "type": "ASCII text"}, {"filename": "csh.logout", "type": "ASCII text"}, {"filename": "cups", "type": "directory"}, {"filename": "defaults", "type": "directory"}, {"filename": "dnsextd.conf", "type": "ASCII text"}, {"filename": "emond.d", "type": "directory"}, {"filename": "find.codes", "type": "empty"}, {"filename": "fstab.hd", "type": "ASCII text"}, {"filename": "ftpusers", "type": "ASCII text"}, {"filename": "gettytab", "type": "ASCII text"}, {"filename": "group", "type": "ASCII text"}, {"filename": "hosts", "type": "ASCII text"}, {"filename": "hosts.equiv", "type": "empty"}, {"filename": "irbrc", "type": "Ruby script text, ASCII text"}, {"filename": "kern_loader.conf", "type": "empty"}, {"filename": "krb5.keytab", "type": "Kerberos Keytab file, realm=LKDC:SHA1.15090DD365668FBA1B0D2D3DD43FCB7CB9381160, principal=afpserver/LKDC:SHA1.15090DD365668FBA1B0D2D3DD43FCB7CB9381160, type=1, date=Tue Jul 2 15:52:04 2019, kvno=2"}, {"filename": "localtime", "type": "timezone data, version 2, 4 gmt time flags, 4 std time flags, no leap seconds, 185 transition times, 4 abbreviation chars"}, {"filename": "locate.rc", "type": "ASCII text"}, {"filename": "mach_init.d", "type": 
"directory"}, {"filename": "mach_init_per_login_session.d", "type": "directory"}, {"filename": "mach_init_per_user.d", "type": "directory"}, {"filename": "mail.rc", "type": "ASCII text"}, {"filename": "man.conf", "type": "ASCII text"}, {"filename": "manpaths", "type": "ASCII text"}, {"filename": "manpaths.d", "type": "directory"}, {"filename": "master.passwd", "type": "ASCII text"}, {"filename": "nanorc", "type": "ASCII text"}, {"filename": "networks", "type": "ASCII text"}, {"filename": "newsyslog.conf", "type": "ASCII text"}, {"filename": "newsyslog.d", "type": "directory"}, {"filename": "nfs.conf", "type": "ASCII text"}, {"filename": "notify.conf", "type": "ASCII text"}, {"filename": "ntp.conf", "type": "ASCII text"}, {"filename": "ntp_opendirectory.conf", "type": "ASCII text"}, {"filename": "openldap", "type": "directory"}, {"filename": "pam.d", "type": "directory"}, {"filename": "passwd", "type": "ASCII text"}, {"filename": "paths", "type": "ASCII text"}, {"filename": "paths.d", "type": "directory"}, {"filename": "periodic", "type": "directory"}, {"filename": "pf.anchors", "type": "directory"}, {"filename": "pf.conf", "type": "ASCII text"}, {"filename": "pf.os", "type": "ASCII text"}, {"filename": "php-fpm.conf.default", "type": "ASCII text"}, {"filename": "php-fpm.d", "type": "directory"}, {"filename": "php.ini.default", "type": "ASCII text"}, {"filename": "postfix", "type": "directory"}, {"filename": "ppp", "type": "directory"}, {"filename": "profile", "type": "ASCII text"}, {"filename": "protocols", "type": "ASCII text"}, {"filename": "racoon", "type": "directory"}, {"filename": "rc.common", "type": "ASCII text"}, {"filename": "rc.netboot", "type": "POSIX shell script text executable, ASCII text"}, {"filename": "resolv.conf", "type": "ASCII text"}, {"filename": "rmtab", "type": "empty"}, {"filename": "rpc", "type": "ASCII text"}, {"filename": "rtadvd.conf", "type": "ASCII text"}, {"filename": "security", "type": "directory"}, {"filename": "services", 
"type": "ASCII text"}, {"filename": "services.broker", "type": "ASCII text"}, {"filename": "shells", "type": "ASCII text"}, {"filename": "snmp", "type": "directory"}, {"filename": "ssh", "type": "directory"}, {"filename": "ssl", "type": "directory"}, {"filename": "sudo_lecture", "type": "ASCII text"}, {"filename": "sudoers", "type": "c program text, ASCII text"}, {"filename": "sudoers.d", "type": "directory"}, {"filename": "syslog.conf", "type": "ASCII text"}, {"filename": "thnuclnt", "type": "directory"}, {"filename": "ttys", "type": "ASCII text"}, {"filename": "wfs", "type": "directory"}, {"filename": "xtab", "type": "empty"}, {"filename": "zprofile", "type": "ASCII text"}, {"filename": "zshrc", "type": "ASCII text"}]
1,570
14,668
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef ASH_APP_LIST_MODEL_APP_ICON_LOAD_HELPER_H_ #define ASH_APP_LIST_MODEL_APP_ICON_LOAD_HELPER_H_ #include <memory> #include <string> #include <vector> #include "ash/app_list/model/app_list_item.h" #include "ash/app_list/model/app_list_item_list.h" #include "ash/app_list/model/app_list_item_list_observer.h" #include "ash/app_list/model/app_list_item_observer.h" #include "ash/app_list/model/app_list_model_export.h" #include "base/callback.h" #include "base/scoped_observation.h" namespace ash { // Helper to trigger icon load of AppListeItem when its icon version number // changes. It could be created for either a single AppListItem, or an // AppListItemList for a folder where it watch for the first // FolderImage::kNumFolderTopItems (currently 4) AppListItem in the list. class APP_LIST_MODEL_EXPORT AppIconLoadHelper : public AppListItemObserver { public: using IconLoadCallback = base::RepeatingCallback<void(const std::string&)>; AppIconLoadHelper(AppListItem* app_item, IconLoadCallback icon_load_callback); AppIconLoadHelper(AppListItemList* list, IconLoadCallback icon_load_callback); AppIconLoadHelper(const AppIconLoadHelper&) = delete; AppIconLoadHelper& operator=(const AppIconLoadHelper&) = delete; ~AppIconLoadHelper() override; private: class AppItemHelper; class AppItemListHelper; std::unique_ptr<AppItemHelper> item_helper_; std::unique_ptr<AppItemListHelper> list_helper_; }; } // namespace ash #endif // ASH_APP_LIST_MODEL_APP_ICON_LOAD_HELPER_H_
567
1,374
<reponame>norzak/jsweet package source.extension; public class ToBeSorted { public void myMethod() { } static void myStaticMethod() { } }
62
1,765
package io.envoyproxy.pgv; import java.sql.Ref; import java.util.concurrent.ConcurrentHashMap; /** * {@code ReflectiveValidatorIndex} uses reflection to discover {@link Validator} implementations lazily the first * time a type is validated. If no validator can be found for {@code type}, a fallback validator * will be used (default ALWAYS_VALID). */ public final class ReflectiveValidatorIndex implements ValidatorIndex { private final ConcurrentHashMap<Class, Validator> VALIDATOR_INDEX = new ConcurrentHashMap<>(); private final ValidatorIndex fallbackIndex; public ReflectiveValidatorIndex() { this(ValidatorIndex.ALWAYS_VALID); } /** * @param fallbackIndex a {@link ValidatorIndex} implementation to use if reflective validator discovery fails. */ public ReflectiveValidatorIndex(ValidatorIndex fallbackIndex) { this.fallbackIndex = fallbackIndex; } /** * Returns the validator for {@code <T>}, or {@code ALWAYS_VALID} if not found. */ @Override @SuppressWarnings("unchecked") public <T> Validator<T> validatorFor(Class clazz) { return VALIDATOR_INDEX.computeIfAbsent(clazz, c -> { try { return reflectiveValidatorFor(c); } catch (ReflectiveOperationException ex) { return fallbackIndex.validatorFor(clazz); } }); } @SuppressWarnings("unchecked") private Validator reflectiveValidatorFor(Class clazz) throws ReflectiveOperationException { Class enclosingClass = clazz; while (enclosingClass.getEnclosingClass() != null) { enclosingClass = enclosingClass.getEnclosingClass(); } String validatorClassName = enclosingClass.getName() + "Validator"; Class validatorClass = clazz.getClassLoader().loadClass(validatorClassName); ValidatorImpl impl = (ValidatorImpl) validatorClass.getDeclaredMethod("validatorFor", Class.class).invoke(null, clazz); return proto -> impl.assertValid(proto, ReflectiveValidatorIndex.this); } }
750
4,879
package com.mapswithme.maps.adapter; public interface AdapterIndexAndPosition { int getRelativePosition(); int getIndex(); }
39
1,658
#pragma once
#ifndef GUJIA_GUJIA_EPOLL_IMPL_H
#define GUJIA_GUJIA_EPOLL_IMPL_H

#include "gujia.h"

#if defined(GUJIA_HAS_EPOLL)

// epoll-based implementation of the EventLoop template. Each fd's currently
// registered interest mask is mirrored in masks_ so that ADD vs MOD and
// partial deletions can be computed without querying the kernel.
namespace gujia {
    template <typename T, size_t SIZE>
    int EventLoop<T, SIZE>::AddEvent(int fd, int mask) {
        mask |= masks_[fd]; /* Merge old events */
        if (mask == masks_[fd]) {
            // Nothing new requested; avoid a redundant syscall.
            return 0;
        }

        /* If the fd was already monitored for some event, we need a MOD
         * operation. Otherwise we need an ADD operation. */
        int op = masks_[fd] == kNone ?
                 EPOLL_CTL_ADD : EPOLL_CTL_MOD;

        struct epoll_event ee = {0};
        if (mask & kReadable) ee.events |= EPOLLIN;
        if (mask & kWritable) ee.events |= EPOLLOUT;
        ee.data.fd = fd;
        if (epoll_ctl(el_fd_, op, fd, &ee) == -1) return -1;
        // Only record the new mask after the kernel accepted the change.
        masks_[fd] = mask;
        return 0;
    }

    template <typename T, size_t SIZE>
    int EventLoop<T, SIZE>::DelEvent(int fd, int del_mask) {
        // Compute the interest that remains after removing del_mask.
        int mask = masks_[fd] & (~del_mask);
        if (mask == masks_[fd]) {
            return 0;
        }

        struct epoll_event ee = {0};
        ee.data.fd = fd;
        if (mask != kNone) {
            // Still interested in something: modify the registration.
            if (mask & kReadable) ee.events |= EPOLLIN;
            if (mask & kWritable) ee.events |= EPOLLOUT;
            epoll_ctl(el_fd_, EPOLL_CTL_MOD, fd, &ee);
        } else {
            /* Note, Kernel < 2.6.9 requires a non null event pointer even for
             * EPOLL_CTL_DEL. */
            epoll_ctl(el_fd_, EPOLL_CTL_DEL, fd, &ee);
        }
        masks_[fd] = mask;
        return 0;
    }

    template <typename T, size_t SIZE>
    int EventLoop<T, SIZE>::Poll(const struct timeval* tvp) {
        // timeval -> milliseconds; nullptr means block indefinitely (-1).
        return epoll_wait(el_fd_,
                          events_.data(), static_cast<int>(events_.size()),
                          tvp ? (tvp->tv_sec * 1000 + tvp->tv_usec / 1000) : -1);
    }

    template <typename T, size_t SIZE>
    int EventLoop<T, SIZE>::GetEventFD(const Event& e) {
        return e.data.fd;
    }

    template <typename T, size_t SIZE>
    bool EventLoop<T, SIZE>::IsEventReadable(const Event& e) {
        return e.events & EPOLLIN;
    }

    template <typename T, size_t SIZE>
    bool EventLoop<T, SIZE>::IsEventWritable(const Event& e) {
        // EPOLLERR/EPOLLHUP are reported as writable — presumably so the
        // write path observes and handles the failure; confirm with callers.
        return e.events & (EPOLLOUT | EPOLLERR | EPOLLHUP);
    }

    template <typename T, size_t SIZE>
    int EventLoop<T, SIZE>::Open() {
        return epoll_create(1024); /* 1024 is just a hint for the kernel */
    }
}

#endif

#endif //GUJIA_GUJIA_EPOLL_IMPL_H
972
6,989
<gh_stars>1000+ #pragma once #define THROWING #ifdef __cplusplus #define THROWING noexcept extern "C" { #endif const char* GetLibDir() THROWING; const char* GetPyLib() THROWING; #ifdef __cplusplus } #endif
88
482
package com.jmrapp.terralegion.game.world.entity;

import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.jmrapp.terralegion.engine.views.drawables.Drawable;
import com.jmrapp.terralegion.engine.world.entity.BodyType;
import com.jmrapp.terralegion.engine.world.entity.WorldBody;
import com.jmrapp.terralegion.game.utils.LightUtils;

/**
 * A world body rendered from a {@link Drawable}, tinted by the ambient light
 * value at its position, with a movement speed.
 */
public abstract class TexturedEntity extends WorldBody {

    protected Drawable drawable;
    protected float speed;

    public TexturedEntity(Drawable drawable, float x, float y, BodyType bodyType, float speed) {
        // The body's dimensions come straight from the drawable's size.
        super(x, y, drawable.getWidth(), drawable.getHeight(), bodyType);
        this.drawable = drawable;
        this.speed = speed;
    }

    public void render(SpriteBatch sb, double lightValue) {
        // Clamp the tint so the entity never renders darker than the minimum
        // light level.
        float brightness = (float) Math.max(LightUtils.MIN_LIGHT_VALUE, lightValue);
        sb.setColor(brightness, brightness, brightness, 1);
        drawable.render(sb, x, y);
        // Restore the batch tint so subsequent draws are unaffected.
        sb.setColor(Color.WHITE);
    }

    public float getSpeed() {
        return speed;
    }
}
381
1,656
""" Set 'deleted' flag from null to false on all certificates once Revision ID: 318b66568358 Revises: 9f79024fe67b Create Date: 2019-02-05 15:42:25.477587 """ # revision identifiers, used by Alembic. revision = "318b66568358" down_revision = "9f79024fe67b" from alembic import op def upgrade(): connection = op.get_bind() # Delete duplicate entries connection.execute("UPDATE certificates SET deleted = false WHERE deleted IS NULL") def downgrade(): pass
159
468
#define GLI_INCLUDE_GL_INTEL_MAP_TEXTURE

// GLIntercept-style enum/function definitions for the
// GL_INTEL_map_texture extension. The bracketed type annotation
// (GLenum[Main]) below is GLIntercept's notation tying a parameter to the
// enum group above; this is not standard C.
enum Main {

  GL_TEXTURE_MEMORY_LAYOUT_INTEL              = 0x83FF,

  // Layout values below are plain integers, not GL enums, so they are kept
  // commented out to avoid clashing with other enum definitions.
  //GL_LAYOUT_DEFAULT_INTEL = 0,
  //GL_LAYOUT_LINEAR_INTEL = 1,
  // GL_LAYOUT_LINEAR_CPU_CACHED_INTEL = 2,

};

void glSyncTextureINTEL(GLuint texture);

void glUnmapTexture2DINTEL(GLuint texture, GLint level);

void *glMapTexture2DINTEL(GLuint texture, GLint level, GLbitfield access, GLint *stride, GLenum[Main] *layout);
233
892
<filename>advisories/unreviewed/2022/05/GHSA-24hw-jxqf-4vc6/GHSA-24hw-jxqf-4vc6.json { "schema_version": "1.2.0", "id": "GHSA-24hw-jxqf-4vc6", "modified": "2022-05-02T03:59:34Z", "published": "2022-05-02T03:59:34Z", "aliases": [ "CVE-2009-4925" ], "details": "Multiple SQL injection vulnerabilities in Portale e-commerce Creasito (aka creasito e-commerce content manager) 1.3.16, when magic_quotes_gpc is disabled, allow remote attackers to execute arbitrary SQL commands via the username parameter to (1) admin/checkuser.php and (2) checkuser.php.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4925" }, { "type": "WEB", "url": "http://secunia.com/advisories/34809" }, { "type": "WEB", "url": "http://www.exploit-db.com/exploits/8497" }, { "type": "WEB", "url": "http://www.securityfocus.com/archive/1/502818/100/0/threaded" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/34605" } ], "database_specific": { "cwe_ids": [ "CWE-89" ], "severity": "MODERATE", "github_reviewed": false } }
576
648
{"resourceType":"ValueSet","id":"list-status","meta":{"lastUpdated":"2015-10-24T07:41:03.495+11:00","profile":["http://hl7.org/fhir/StructureDefinition/valueset-shareable-definition"]},"text":{"status":"generated","div":"<div xmlns=\"http://www.w3.org/1999/xhtml\">\n <h2>ListStatus</h2>\n <p>The current state of the list</p>\n <p>This value set has an inline code system http://hl7.org/fhir/list-status, which defines the following codes:</p>\n <table class=\"codes\">\n <tr>\n <td>\n <b>Code</b>\n </td>\n <td>\n <b>Display</b>\n </td>\n <td>\n <b>Definition</b>\n </td>\n </tr>\n <tr>\n <td>current\n <a name=\"current\"> </a>\n </td>\n <td>Current</td>\n <td>The list is considered to be an active part of the patient's record.</td>\n </tr>\n <tr>\n <td>retired\n <a name=\"retired\"> </a>\n </td>\n <td>Retired</td>\n <td>The list is &quot;old&quot; and should no longer be considered accurate or relevant.</td>\n </tr>\n <tr>\n <td>entered-in-error\n <a name=\"entered-in-error\"> </a>\n </td>\n <td>Entered In Error</td>\n <td>The list was never accurate. 
It is retained for medico-legal purposes only.</td>\n </tr>\n </table>\n </div>"},"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/valueset-oid","valueUri":"urn:oid:2.16.840.1.113883.4.642.2.178"}],"url":"http://hl7.org/fhir/ValueSet/list-status","version":"1.0.2","name":"ListStatus","status":"draft","experimental":false,"publisher":"HL7 (FHIR Project)","contact":[{"telecom":[{"system":"other","value":"http://hl7.org/fhir"},{"system":"email","value":"<EMAIL>"}]}],"date":"2015-10-24T07:41:03+11:00","description":"The current state of the list","codeSystem":{"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/valueset-oid","valueUri":"urn:oid:2.16.840.1.113883.4.642.1.178"}],"system":"http://hl7.org/fhir/list-status","version":"1.0.2","caseSensitive":true,"concept":[{"code":"current","display":"Current","definition":"The list is considered to be an active part of the patient's record."},{"code":"retired","display":"Retired","definition":"The list is \"old\" and should no longer be considered accurate or relevant."},{"code":"entered-in-error","display":"Entered In Error","definition":"The list was never accurate. It is retained for medico-legal purposes only."}]}}
1,344
339
'''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license.
Please see http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.

Created on January, 24, 2013

@author: <NAME>
@contact: <EMAIL>
@summary: Example tutorial code.
'''

# NOTE: this module is Python 2 code (print statements); it targets the
# legacy QSTK toolkit and is kept as-is.

# QSTK Imports
import QSTK.qstkutil.qsdateutil as du
import QSTK.qstkutil.tsutil as tsu
import QSTK.qstkutil.DataAccess as da

# Third Party Imports
import datetime as dt
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np


def main():
    ''' Main Function'''

    # Reading the portfolio: CSV rows of (symbol, allocation), header skipped.
    na_portfolio = np.loadtxt('tutorial3portfolio.csv', dtype='S5,f4',
                        delimiter=',', comments="#", skiprows=1)
    print na_portfolio

    # Sorting the portfolio by symbol name
    na_portfolio = sorted(na_portfolio, key=lambda x: x[0])
    print na_portfolio

    # Create two list for symbol names and allocation
    ls_port_syms = []
    lf_port_alloc = []
    for port in na_portfolio:
        ls_port_syms.append(port[0])
        lf_port_alloc.append(port[1])

    # Creating an object of the dataaccess class with Yahoo as the source.
    c_dataobj = da.DataAccess('Yahoo')
    ls_all_syms = c_dataobj.get_all_symbols()
    # Bad symbols are symbols present in portfolio but not in all syms;
    # drop them (and their allocations) so the data fetch below succeeds.
    ls_bad_syms = list(set(ls_port_syms) - set(ls_all_syms))

    if len(ls_bad_syms) != 0:
        print "Portfolio contains bad symbols : ", ls_bad_syms

    for s_sym in ls_bad_syms:
        i_index = ls_port_syms.index(s_sym)
        ls_port_syms.pop(i_index)
        lf_port_alloc.pop(i_index)

    # Reading the historical data.
    dt_end = dt.datetime(2011, 1, 1)
    dt_start = dt_end - dt.timedelta(days=1095)  # Three years

    # We need closing prices so the timestamp should be hours=16.
    dt_timeofday = dt.timedelta(hours=16)

    # Get a list of trading days between the start and the end.
    ldt_timestamps = du.getNYSEdays(dt_start, dt_end, dt_timeofday)

    # Keys to be read from the data, it is good to read everything in one go.
    ls_keys = ['open', 'high', 'low', 'close', 'volume', 'actual_close']

    # Reading the data, now d_data is a dictionary with the keys above.
    # Timestamps and symbols are the ones that were specified before.
    ldf_data = c_dataobj.get_data(ldt_timestamps, ls_port_syms, ls_keys)
    d_data = dict(zip(ls_keys, ldf_data))

    # Copying close price into separate dataframe to find rets
    df_rets = d_data['close'].copy()
    # Filling the data: forward-fill, then back-fill remaining leading gaps,
    # then default anything still missing to 1.0.
    df_rets = df_rets.fillna(method='ffill')
    df_rets = df_rets.fillna(method='bfill')
    df_rets = df_rets.fillna(1.0)

    # Numpy matrix of filled data values
    na_rets = df_rets.values
    # returnize0 works on ndarray and not dataframes.
    tsu.returnize0(na_rets)

    # Estimate portfolio returns: weight each symbol's daily return by its
    # allocation, then compound over time.
    na_portrets = np.sum(na_rets * lf_port_alloc, axis=1)
    na_port_total = np.cumprod(na_portrets + 1)
    na_component_total = np.cumprod(na_rets + 1, axis=0)

    # Plotting the results
    plt.clf()
    fig = plt.figure()
    fig.add_subplot(111)
    plt.plot(ldt_timestamps, na_component_total, alpha=0.4)
    plt.plot(ldt_timestamps, na_port_total)
    ls_names = ls_port_syms
    ls_names.append('Portfolio')
    plt.legend(ls_names)
    plt.ylabel('Cumulative Returns')
    plt.xlabel('Date')
    fig.autofmt_xdate(rotation=45)
    plt.savefig('tutorial3.pdf', format='pdf')

if __name__ == '__main__':
    main()
1,548
777
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/chromeos/feedback_util.h" #include "base/bind.h" #include "base/callback.h" #include "base/logging.h" #include "chrome/browser/extensions/api/feedback_private/feedback_private_api.h" #include "chrome/browser/extensions/api/feedback_private/feedback_service.h" #include "chrome/browser/profiles/profile.h" using feedback::FeedbackData; namespace feedback_util { namespace { extensions::FeedbackService* GetFeedbackService(Profile* profile) { return extensions::FeedbackPrivateAPI::GetFactoryInstance() ->Get(profile) ->GetService(); } void OnGetSystemInformation( Profile* profile, const std::string& description, const SendSysLogFeedbackCallback& callback, std::unique_ptr<system_logs::SystemLogsResponse> sys_info) { scoped_refptr<FeedbackData> feedback_data(new FeedbackData()); feedback_data->set_context(profile); feedback_data->set_description(description); feedback_data->SetAndCompressSystemInfo(std::move(sys_info)); GetFeedbackService(profile)->SendFeedback(profile, feedback_data, callback); } } // namespace void SendSysLogFeedback(Profile* profile, const std::string& description, const SendSysLogFeedbackCallback& callback) { GetFeedbackService(profile)->GetSystemInformation( base::Bind(&OnGetSystemInformation, profile, description, callback)); } } // namespace feedback_util
523
3,200
# Copyright 2019 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""test mnist to mindrecord tool"""
import gzip
import os
import pytest
import numpy as np
import cv2

from mindspore import log as logger
from mindspore.mindrecord import FileReader
from mindspore.mindrecord import MnistToMR

# Test fixture locations and MNIST image geometry.
MNIST_DIR = "../data/mindrecord/testMnistData"
FILE_NAME = "mnist"
PARTITION_NUM = 4
IMAGE_SIZE = 28
NUM_CHANNELS = 1


@pytest.fixture
def fixture_file():
    """Remove generated mindrecord files before and after each test."""
    def remove_one_file(x):
        # Best-effort delete; missing files are fine.
        if os.path.exists(x):
            os.remove(x)

    def remove_file():
        # Single-file outputs plus their index (.db) companions.
        x = "mnist_train.mindrecord"
        remove_one_file(x)
        x = "mnist_train.mindrecord.db"
        remove_one_file(x)
        x = "mnist_test.mindrecord"
        remove_one_file(x)
        x = "mnist_test.mindrecord.db"
        remove_one_file(x)
        # Partitioned outputs: one file (and .db) per partition index.
        for i in range(PARTITION_NUM):
            x = "mnist_train.mindrecord" + str(i)
            remove_one_file(x)
            x = "mnist_train.mindrecord" + str(i) + ".db"
            remove_one_file(x)
            x = "mnist_test.mindrecord" + str(i)
            remove_one_file(x)
            x = "mnist_test.mindrecord" + str(i) + ".db"
            remove_one_file(x)

    remove_file()
    yield "yield_fixture_data"
    remove_file()


def read(train_name, test_name):
    """Sanity-check reader: each record has 2 fields; the test fixture
    contains 20 train and 10 test samples."""
    count = 0
    reader = FileReader(train_name)
    for _, x in enumerate(reader.get_next()):
        assert len(x) == 2
        count = count + 1
        if count == 1:
            logger.info("data: {}".format(x))
    assert count == 20
    reader.close()

    count = 0
    reader = FileReader(test_name)
    for _, x in enumerate(reader.get_next()):
        assert len(x) == 2
        count = count + 1
        if count == 1:
            logger.info("data: {}".format(x))
    assert count == 10
    reader.close()


def test_mnist_to_mindrecord(fixture_file):
    """test transform mnist dataset to mindrecord."""
    mnist_transformer = MnistToMR(MNIST_DIR, FILE_NAME)
    mnist_transformer.transform()
    assert os.path.exists("mnist_train.mindrecord")
    assert os.path.exists("mnist_test.mindrecord")

    read("mnist_train.mindrecord", "mnist_test.mindrecord")


def test_mnist_to_mindrecord_compare_data(fixture_file):
    """test transform mnist dataset to mindrecord and compare data."""
    mnist_transformer = MnistToMR(MNIST_DIR, FILE_NAME)
    mnist_transformer.transform()
    assert os.path.exists("mnist_train.mindrecord")
    assert os.path.exists("mnist_test.mindrecord")

    train_name, test_name = "mnist_train.mindrecord", "mnist_test.mindrecord"

    def _extract_images(filename, num_images):
        """Extract the images into a 4D tensor [image index, y, x, channels]."""
        with gzip.open(filename) as bytestream:
            # Skip the 16-byte IDX image-file header.
            bytestream.read(16)
            buf = bytestream.read(
                IMAGE_SIZE * IMAGE_SIZE * num_images * NUM_CHANNELS)
            data = np.frombuffer(buf, dtype=np.uint8)
            data = data.reshape(
                num_images, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS)
            return data

    def _extract_labels(filename, num_images):
        """Extract the labels into a vector of int64 label IDs."""
        with gzip.open(filename) as bytestream:
            # Skip the 8-byte IDX label-file header.
            bytestream.read(8)
            buf = bytestream.read(1 * num_images)
            labels = np.frombuffer(buf, dtype=np.uint8).astype(np.int64)
            return labels

    train_data_filename_ = os.path.join(MNIST_DIR,
                                        'train-images-idx3-ubyte.gz')
    train_labels_filename_ = os.path.join(MNIST_DIR,
                                          'train-labels-idx1-ubyte.gz')
    test_data_filename_ = os.path.join(MNIST_DIR,
                                       't10k-images-idx3-ubyte.gz')
    test_labels_filename_ = os.path.join(MNIST_DIR,
                                         't10k-labels-idx1-ubyte.gz')
    train_data = _extract_images(train_data_filename_, 20)
    train_labels = _extract_labels(train_labels_filename_, 20)
    test_data = _extract_images(test_data_filename_, 10)
    test_labels = _extract_labels(test_labels_filename_, 10)

    # Compare every stored record against a freshly re-encoded JPEG of the
    # raw MNIST image plus its label.
    reader = FileReader(train_name)
    for x, data, label in zip(reader.get_next(), train_data, train_labels):
        _, img = cv2.imencode(".jpeg", data)
        assert np.array(x['data']) == img.tobytes()
        assert np.array(x['label']) == label
    reader.close()

    reader = FileReader(test_name)
    for x, data, label in zip(reader.get_next(), test_data, test_labels):
        _, img = cv2.imencode(".jpeg", data)
        assert np.array(x['data']) == img.tobytes()
        assert np.array(x['label']) == label
    reader.close()


def test_mnist_to_mindrecord_multi_partition(fixture_file):
    """test transform mnist dataset to multiple mindrecord files."""
    mnist_transformer = MnistToMR(MNIST_DIR, FILE_NAME, PARTITION_NUM)
    mnist_transformer.transform()

    read("mnist_train.mindrecord0", "mnist_test.mindrecord0")
2,477
480
import unittest
from nlu import *

class TestWordSegmenter(unittest.TestCase):

    def test_word_segmenter(self):
        """Smoke test: run the Chinese word-segmenter and tokenizer pipelines
        on a fixed Chinese paragraph and print every output column.

        There are no assertions; the test passes as long as loading and
        prediction do not raise.
        """
        # Chinese word segmentation at token level.
        pipe = nlu.load('zh.segment_words',verbose=True)
        # NOTE: the input string is runtime test data and must stay unchanged.
        data = '您的生活就是矩阵编程固有的不平衡方程的剩余部分之和。您是异常的最终结果,尽管做出了我最大的努力,但我仍然无法消除数学精度的和谐。尽管仍然不遗余力地避免了负担,但这并不意外,因此也不超出控制范围。这无可避免地将您引向了这里。'
        df = pipe.predict(data, output_level='token',drop_irrelevant_cols=False, metadata=True, )
        for c in df.columns: print(df[c])

        # Same text through the generic tokenizer, aggregated per sentence.
        pipe = nlu.load('zh.tokenize',verbose=True)
        data = '您的生活就是矩阵编程固有的不平衡方程的剩余部分之和。您是异常的最终结果,尽管做出了我最大的努力,但我仍然无法消除数学精度的和谐。尽管仍然不遗余力地避免了负担,但这并不意外,因此也不超出控制范围。这无可避免地将您引向了这里。'
        df = pipe.predict([data], output_level='sentence',drop_irrelevant_cols=False, metadata=True, )
        for c in df.columns: print(df[c])

if __name__ == '__main__':
    unittest.main()
732
14,425
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.crypto;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.util.StringUtils;

/**
 * Defines properties of a CipherSuite. Modeled after the ciphers in
 * {@link javax.crypto.Cipher}.
 */
@InterfaceAudience.Private
public enum CipherSuite {
  UNKNOWN("Unknown", 0),
  AES_CTR_NOPADDING("AES/CTR/NoPadding", 16),
  SM4_CTR_NOPADDING("SM4/CTR/NoPadding", 16);

  // Cipher transformation name, as used by javax.crypto.Cipher.
  private final String name;
  // Algorithm block size in bytes.
  private final int algoBlockSize;

  // Raw value carried for suites not recognized by this enum; null until
  // setUnknownValue() is called.
  private Integer unknownValue = null;

  CipherSuite(String name, int algoBlockSize) {
    this.name = name;
    this.algoBlockSize = algoBlockSize;
  }

  public void setUnknownValue(int unknown) {
    this.unknownValue = unknown;
  }

  /**
   * @return the raw value previously stored via {@link #setUnknownValue(int)}.
   * NOTE(review): this unboxes {@code unknownValue} and will throw a
   * NullPointerException if {@code setUnknownValue} was never called —
   * presumably callers only invoke this on {@code UNKNOWN} after setting the
   * value; confirm before relying on it elsewhere.
   */
  public int getUnknownValue() {
    return unknownValue;
  }

  /**
   * @return name of cipher suite, as in {@link javax.crypto.Cipher}
   */
  public String getName() {
    return name;
  }

  /**
   * @return size of an algorithm block in bytes
   */
  public int getAlgorithmBlockSize() {
    return algoBlockSize;
  }

  @Override
  public String toString() {
    StringBuilder builder = new StringBuilder("{");
    builder.append("name: " + name)
        .append(", algorithmBlockSize: " + algoBlockSize);
    if (unknownValue != null) {
      builder.append(", unknownValue: " + unknownValue);
    }
    builder.append("}");
    return builder.toString();
  }

  /**
   * Convert to CipherSuite from name, {@link #algoBlockSize} is fixed for
   * certain cipher suite, just need to compare the name.
   * @param name cipher suite name
   * @return CipherSuite cipher suite
   */
  public static CipherSuite convert(String name) {
    CipherSuite[] suites = CipherSuite.values();
    for (CipherSuite suite : suites) {
      if (suite.getName().equals(name)) {
        return suite;
      }
    }
    throw new IllegalArgumentException("Invalid cipher suite name: " + name);
  }

  /**
   * Returns suffix of cipher suite configuration.
   * For example "AES/CTR/NoPadding" becomes ".aes.ctr.nopadding".
   * @return String configuration suffix
   */
  public String getConfigSuffix() {
    String[] parts = name.split("/");
    StringBuilder suffix = new StringBuilder();
    for (String part : parts) {
      suffix.append(".").append(StringUtils.toLowerCase(part));
    }

    return suffix.toString();
  }
}
1,037
2,671
<reponame>timmartin/skulpt<filename>test/tokenize/t02.py<gh_stars>1000+ 0o123 <= 0123
39
542
#ifndef GUARD_le_tessellator_H
#define GUARD_le_tessellator_H

// Module API for polygon tessellation: feed 2D polylines in, get triangle
// indices + vertices out. Backed by either libtess (default) or earcut,
// selected via Options.

#include "le_core.h"

#ifdef __cplusplus
#	include <glm/fwd.hpp>
#endif

struct le_tessellator_o;

// clang-format off
struct le_tessellator_api {

	typedef uint16_t IndexType;

	struct le_tessellator_interface_t {

		// Winding-mode bits are stored above the boolean option bits.
		static constexpr auto OptionsWindingsOffset = 3;

		enum Options : uint64_t {
			// Flip one or more bits for options.
			bitUseEarcutTessellator             = 1 << 0, // use earcut over libtess, libtess being default
			bitConstrainedDelaunayTriangulation = 1 << 1, /* ignored if tessellator not libtess */
			bitReverseContours                  = 1 << 2, /* ignored if tessellator not libtess */
			// Pick *one* of the following winding modes;
			// For a description of winding modes, see: <http://www.glprogramming.com/red/chapter11.html>
			eWindingOdd                         = 0 << OptionsWindingsOffset, /* ignored if tessellator not libtess */
			eWindingNonzero                     = 1 << OptionsWindingsOffset, /* ignored if tessellator not libtess */
			eWindingPositive                    = 3 << OptionsWindingsOffset, /* ignored if tessellator not libtess */
			eWindingNegative                    = 4 << OptionsWindingsOffset, /* ignored if tessellator not libtess */
			eWindingAbsGeqTwo                   = 5 << OptionsWindingsOffset, /* ignored if tessellator not libtess */
		};

		le_tessellator_o * ( * create       ) ( );
		void               ( * destroy      ) ( le_tessellator_o* self );

		void               ( * set_options  ) ( le_tessellator_o* self, uint64_t options);
		// Adds one contour; call repeatedly for polygons with holes.
		void               ( * add_polyline ) ( le_tessellator_o* self, glm::vec2 const * const pPoints, size_t const& pointCount );
		// Results are only valid after a successful tessellate() call.
		void               ( * get_indices  ) ( le_tessellator_o* self, IndexType const ** pIndices, size_t * indexCount );
		void               ( * get_vertices ) ( le_tessellator_o* self, glm::vec2 const ** pVertices, size_t * vertexCount );
		bool               ( * tessellate   ) ( le_tessellator_o* self );
		// Clears accumulated contours/results so the object can be reused.
		void               ( * reset        ) ( le_tessellator_o* self );
	};

	le_tessellator_interface_t le_tessellator_i;
};
// clang-format on

LE_MODULE( le_tessellator );
LE_MODULE_LOAD_DEFAULT( le_tessellator );

#ifdef __cplusplus

namespace le_tessellator {
static const auto &api             = le_tessellator_api_i;
static const auto &le_tessellator_i = api -> le_tessellator_i;
using Options                      = le_tessellator_api::le_tessellator_interface_t::Options;
} // namespace le_tessellator

// RAII convenience wrapper owning one tessellator object.
class LeTessellator : NoCopy, NoMove {
	le_tessellator_o *self;

  public:
	LeTessellator()
	    : self( le_tessellator::le_tessellator_i.create() ) {
	}

	~LeTessellator() {
		le_tessellator::le_tessellator_i.destroy( self );
	}

	operator auto() {
		return self;
	}
};
#endif // __cplusplus

#endif
1,352
1,652
package com.ctrip.xpipe.redis.console.service;

import com.ctrip.xpipe.redis.console.AbstractConsoleIntegrationTest;
import com.ctrip.xpipe.redis.console.model.ZoneTbl;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;

import java.util.List;

/**
 * Integration tests for {@link ZoneService} against the seeded H2 test
 * database.
 *
 * @author taotaotu
 *         May 24, 2019
 */
public class ZoneServiceTest extends AbstractConsoleIntegrationTest {

    // Zone name expected at id 1 in the H2 seed data.
    private static final String H2DB_ZONE_SHA = "SHA";

    // Name for the zone inserted by testCreateZone(); NEW_ZONE_ID assumes the
    // seed data contains exactly two zones, so the next auto id is 3.
    private static final String NEW_ZONE_NAME = "SFO";
    private static final long NEW_ZONE_ID = 3;

    @Autowired
    private ZoneService zoneService;

    @Test
    public void testQuery(){
        // Lookup by primary key should return the seeded SHA zone.
        ZoneTbl zoneTbl = zoneService.findById(1);
        Assert.assertEquals(H2DB_ZONE_SHA, zoneTbl.getZoneName());
    }

    @Test
    public void testQueryAll(){
        // Seed data contains two zones, listed in id order.
        List<ZoneTbl> zoneTbls = zoneService.findAllZones();
        Assert.assertEquals(2, zoneTbls.size());
        Assert.assertEquals(H2DB_ZONE_SHA, zoneTbls.get(0).getZoneName());
    }

    @Test
    public void testCreateZone(){
        // Insert a new zone and read it back by its expected auto-assigned id.
        zoneService.insertRecord(NEW_ZONE_NAME);

        ZoneTbl zoneTbl = zoneService.findById(NEW_ZONE_ID);
        Assert.assertEquals(NEW_ZONE_NAME, zoneTbl.getZoneName());
    }
}
514
848
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/core/util/dump_graph.h"

#include "tensorflow/core/graph/graph.h"
#include "tensorflow/core/graph/node_builder.h"
#include "tensorflow/core/lib/io/path.h"
#include "tensorflow/core/lib/strings/proto_serialization.h"
#include "tensorflow/core/platform/env.h"
#include "tensorflow/core/platform/test.h"

namespace tensorflow {
namespace {

// Dumping twice with the same name must produce distinct files
// ("graph.pbtxt", then "graph_1.pbtxt"), and the written proto must
// round-trip to match the in-memory graph.
TEST(DumpGraph, DumpGraphToFileSuccess) {
  Graph graph(OpRegistry::Global());
  Node* node;
  TF_CHECK_OK(NodeBuilder("A", "NoOp").Finalize(&graph, &node));

  setenv("TF_DUMP_GRAPH_PREFIX", testing::TmpDir().c_str(), 1);
  string ret = DumpGraphToFile("graph", graph);
  EXPECT_EQ(ret, io::JoinPath(testing::TmpDir(), "graph.pbtxt"));
  ret = DumpGraphToFile("graph", graph);
  EXPECT_EQ(ret, io::JoinPath(testing::TmpDir(), "graph_1.pbtxt"));

  GraphDef gdef;
  TF_CHECK_OK(ReadTextProto(
      Env::Default(), io::JoinPath(testing::TmpDir(), "graph.pbtxt"), &gdef));
  string read, written;
  gdef.AppendToString(&read);
  graph.ToGraphDefDebug().AppendToString(&written);
  EXPECT_EQ(read, written);
}

// Without TF_DUMP_GRAPH_PREFIX set, dumping returns a sentinel string
// instead of a path.
TEST(DumpGraph, DumpGraphToFileNoEnvPrefix) {
  Graph graph(OpRegistry::Global());
  unsetenv("TF_DUMP_GRAPH_PREFIX");
  string ret = DumpGraphToFile("graph", graph);
  EXPECT_EQ(ret, "(TF_DUMP_GRAPH_PREFIX not specified)");
}

// A FunctionDef dump goes to "<name>.pbtxt" under the prefix directory.
TEST(DumpGraph, DumpFunctionDefToFileSuccess) {
  FunctionDef fdef;
  setenv("TF_DUMP_GRAPH_PREFIX", testing::TmpDir().c_str(), 1);
  string ret = DumpFunctionDefToFile("function", fdef);
  EXPECT_EQ(ret, io::JoinPath(testing::TmpDir(), "function.pbtxt"));
}

}  // namespace
}  // namespace tensorflow
773
14,668
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/gpu/vaapi/test/scoped_va_context.h"

#include "media/gpu/vaapi/test/macros.h"
#include "media/gpu/vaapi/test/scoped_va_config.h"
#include "media/gpu/vaapi/test/vaapi_device.h"

namespace media {
namespace vaapi_test {

// Creates a VA context of |size| for |config| on |device|. Asserts (fatally)
// on any libva failure or an invalid returned context ID, so a constructed
// object always owns a valid context.
ScopedVAContext::ScopedVAContext(const VaapiDevice& device,
                                 const ScopedVAConfig& config,
                                 const gfx::Size& size)
    : device_(device),
      config_(config),
      context_id_(VA_INVALID_ID),
      size_(size) {
  // No render targets are bound at creation time.
  const VAStatus res =
      vaCreateContext(device_.display(), config_.id(), size_.width(),
                      size_.height(), VA_PROGRESSIVE,
                      /*render_targets=*/nullptr,
                      /*num_render_targets=*/0, &context_id_);
  VA_LOG_ASSERT(res, "vaCreateContext");
  LOG_ASSERT(context_id_ != VA_INVALID_ID)
      << "vaCreateContext created invalid context ID";
  VLOG(1) << "Created context with ID " << context_id_;
}

// Releases the owned VA context; asserts on libva failure.
ScopedVAContext::~ScopedVAContext() {
  VLOG(1) << "Destroying context " << context_id_;
  DCHECK_NE(context_id_, VA_INVALID_ID);
  const VAStatus res = vaDestroyContext(device_.display(), context_id_);
  VA_LOG_ASSERT(res, "vaDestroyContext");
}

}  // namespace vaapi_test
}  // namespace media
622
2,338
//===--- RedundantPreprocessorCheck.h - clang-tidy --------------*- C++ -*-===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// #ifndef LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_READABILITY_REDUNDANTPREPROCESSORCHECK_H #define LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_READABILITY_REDUNDANTPREPROCESSORCHECK_H #include "../ClangTidyCheck.h" namespace clang { namespace tidy { namespace readability { /// This check flags redundant preprocessor directives: nested directives with /// the same condition. /// /// For the user-facing documentation see: /// http://clang.llvm.org/extra/clang-tidy/checks/readability-redundant-preprocessor.html class RedundantPreprocessorCheck : public ClangTidyCheck { public: RedundantPreprocessorCheck(StringRef Name, ClangTidyContext *Context) : ClangTidyCheck(Name, Context) {} void registerPPCallbacks(const SourceManager &SM, Preprocessor *PP, Preprocessor *ModuleExpanderPP) override; }; } // namespace readability } // namespace tidy } // namespace clang #endif // LLVM_CLANG_TOOLS_EXTRA_CLANG_TIDY_READABILITY_REDUNDANTPREPROCESSORCHECK_H
460
1,338
/*
 * Copyright 2008, <NAME>, <EMAIL>.
 * Distributed under the terms of the MIT License.
 */
#ifndef _VFS_FIFO_H
#define _VFS_FIFO_H


#include <fs_interface.h>


/* Creates a FIFO vnode on |superVolume|, filling in |vnode|.
 * Returns a status_t error code; presumably B_OK on success —
 * see the implementation for the exact contract. */
status_t	create_fifo_vnode(fs_volume* superVolume, fs_vnode* vnode);

/* One-time initialization of the FIFO subsystem; call before creating
 * FIFO vnodes. */
void		fifo_init();


#endif	// _VFS_FIFO_H
118
1,444
<gh_stars>1000+ package org.mage.test.cards.single.m21; import mage.constants.PhaseStep; import mage.constants.Zone; import mage.counters.CounterType; import org.junit.Test; import org.mage.test.serverside.base.CardTestPlayerBase; public class AnimalSanctuaryTest extends CardTestPlayerBase { private static final String sanctuary = "Animal Sanctuary"; private static final String bird = "Birds of Paradise"; private static final String cat = "Ajani's Pridemate"; private static final String dog = "Wild Mongrel"; private static final String goat = "Mountain Goat"; private static final String ox = "Raging Bull"; private static final String snake = "Anaconda"; @Test public void boostBird(){ addCard(Zone.BATTLEFIELD, playerA, sanctuary); addCard(Zone.BATTLEFIELD, playerA, "Plains", 2); addCard(Zone.BATTLEFIELD, playerA, bird); // {2}, {T}: Put a +1/+1 counter on target Bird, Cat, Dog, Goat, Ox, or Snake. activateAbility(1, PhaseStep.PRECOMBAT_MAIN, playerA, "{2}, {T}: ", bird); setStrictChooseMode(true); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertAllCommandsUsed(); assertCounterCount(playerA, bird, CounterType.P1P1, 1); } @Test public void boostCat(){ addCard(Zone.BATTLEFIELD, playerA, sanctuary); addCard(Zone.BATTLEFIELD, playerA, "Plains", 2); addCard(Zone.BATTLEFIELD, playerA, cat); // {2}, {T}: Put a +1/+1 counter on target Bird, Cat, Dog, Goat, Ox, or Snake. activateAbility(1, PhaseStep.PRECOMBAT_MAIN, playerA, "{2}, {T}: ", cat); setStrictChooseMode(true); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertAllCommandsUsed(); assertCounterCount(playerA, cat, CounterType.P1P1, 1); } @Test public void boostDog(){ addCard(Zone.BATTLEFIELD, playerA, sanctuary); addCard(Zone.BATTLEFIELD, playerA, "Plains", 2); addCard(Zone.BATTLEFIELD, playerA, dog); // {2}, {T}: Put a +1/+1 counter on target Bird, Cat, Dog, Goat, Ox, or Snake. 
activateAbility(1, PhaseStep.PRECOMBAT_MAIN, playerA, "{2}, {T}: ", dog); setStrictChooseMode(true); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertAllCommandsUsed(); assertCounterCount(playerA, dog, CounterType.P1P1, 1); } @Test public void boostGoat(){ addCard(Zone.BATTLEFIELD, playerA, sanctuary); addCard(Zone.BATTLEFIELD, playerA, "Plains", 2); addCard(Zone.BATTLEFIELD, playerA, goat); // {2}, {T}: Put a +1/+1 counter on target Bird, Cat, Dog, Goat, Ox, or Snake. activateAbility(1, PhaseStep.PRECOMBAT_MAIN, playerA, "{2}, {T}: ", goat); setStrictChooseMode(true); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertAllCommandsUsed(); assertCounterCount(playerA, goat, CounterType.P1P1, 1); } @Test public void boostOx(){ addCard(Zone.BATTLEFIELD, playerA, sanctuary); addCard(Zone.BATTLEFIELD, playerA, "Plains", 2); addCard(Zone.BATTLEFIELD, playerA, ox); // {2}, {T}: Put a +1/+1 counter on target Bird, Cat, Dog, Goat, Ox, or Snake. activateAbility(1, PhaseStep.PRECOMBAT_MAIN, playerA, "{2}, {T}: ", ox); setStrictChooseMode(true); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertAllCommandsUsed(); assertCounterCount(playerA, ox, CounterType.P1P1, 1); } @Test public void boostSnake(){ addCard(Zone.BATTLEFIELD, playerA, sanctuary); addCard(Zone.BATTLEFIELD, playerA, "Plains", 2); addCard(Zone.BATTLEFIELD, playerA, snake); // {2}, {T}: Put a +1/+1 counter on target Bird, Cat, Dog, Goat, Ox, or Snake. activateAbility(1, PhaseStep.PRECOMBAT_MAIN, playerA, "{2}, {T}: ", snake); setStrictChooseMode(true); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertAllCommandsUsed(); assertCounterCount(playerA, snake, CounterType.P1P1, 1); } }
1,726
14,668
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROMECAST_CRASH_LINUX_DUMMY_MINIDUMP_GENERATOR_H_
#define CHROMECAST_CRASH_LINUX_DUMMY_MINIDUMP_GENERATOR_H_

#include <string>

#include "chromecast/crash/linux/minidump_generator.h"

namespace chromecast {

// A "generator" that does not create a minidump itself: it moves an
// already-existing minidump file into the monitored location.
class DummyMinidumpGenerator : public MinidumpGenerator {
 public:
  // A dummy minidump generator to move an existing minidump into
  // crash_uploader's monitoring path ($HOME/minidumps). The path is monitored
  // with file lock-control, so that third process should not write to it
  // directly.
  explicit DummyMinidumpGenerator(const std::string& existing_minidump_path);

  DummyMinidumpGenerator(const DummyMinidumpGenerator&) = delete;
  DummyMinidumpGenerator& operator=(const DummyMinidumpGenerator&) = delete;

  // MinidumpGenerator implementation:
  // Moves the minidump located at |existing_minidump_path_| to |minidump_path|.
  // Returns true if successful, false otherwise. Note that this function MUST
  // be called on a thread without IO restrictions, or it will fail fatally.
  bool Generate(const std::string& minidump_path) override;

 private:
  // Source path captured at construction; consumed (moved away) by Generate().
  const std::string existing_minidump_path_;
};

}  // namespace chromecast

#endif  // CHROMECAST_CRASH_LINUX_DUMMY_MINIDUMP_GENERATOR_H_
464
1,056
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.editor; /** * Token-item presents a token as a piece information * without dependence on a character buffer and it enables * to chain the token-items in both directions. * * @author <NAME> * @version 1.00 */ public interface TokenItem { /** Get the token-id of this token-item */ public TokenID getTokenID(); /** Get the token-id of this token-item */ public TokenContextPath getTokenContextPath(); /** Get the position of the token in the document */ public int getOffset(); /** Get the image of this token. */ public String getImage(); /** Get next token-item in the text. It returns null * if there's no more next tokens in the text. It can throw * <tt>IllegalStateException</tt> in case the document * was changed so the token-item chain becomes invalid. */ public TokenItem getNext(); /** Get previous token-item in the text. It returns null * if there's no more previous tokens in the text. It can throw * <tt>IllegalStateException</tt> in case the document * was changed so the token-item chain becomes invalid. */ public TokenItem getPrevious(); /** Abstract implementation that doesn't contain chaining methods. 
*/ public static abstract class AbstractItem implements TokenItem { private TokenID tokenID; private TokenContextPath tokenContextPath; private String image; private int offset; public AbstractItem(TokenID tokenID, TokenContextPath tokenContextPath, int offset, String image) { this.tokenID = tokenID; this.tokenContextPath = tokenContextPath; this.offset = offset; this.image = image; } public TokenID getTokenID() { return tokenID; } public TokenContextPath getTokenContextPath() { return tokenContextPath; } public int getOffset() { return offset; } public String getImage() { return image; } public String toString() { return "'" + org.netbeans.editor.EditorDebug.debugString(getImage()) // NOI18N + "', tokenID=" + getTokenID() + ", tcp=" + getTokenContextPath() // NOI18N + ", offset=" + getOffset(); // NOI18N } } /** Implementation useful for delegation. */ public static class FilterItem implements TokenItem { protected TokenItem delegate; public FilterItem(TokenItem delegate) { this.delegate = delegate; } public TokenID getTokenID() { return delegate.getTokenID(); } public TokenContextPath getTokenContextPath() { return delegate.getTokenContextPath(); } public int getOffset() { return delegate.getOffset(); } public String getImage() { return delegate.getImage(); } public TokenItem getNext() { return delegate.getNext(); } public TokenItem getPrevious() { return delegate.getPrevious(); } public String toString() { return delegate.toString(); } } }
1,452
558
<reponame>pratikadarsh/Algorithms /* * @file ListStack.h * @author (original JAVA) <NAME>, <EMAIL> * (conversion to C++) <NAME>, <EMAIL> * @date 02 July 2020 * @version 0.1 * @brief A linked list implementation of a stack. */ #ifndef D_LINKLIST_STACK_H #define D_LINKLIST_STACK_H #include <Stack.h> #include <deque> #include <list> #include <set> // set and multiset #include <map> // map and multimap #include <unordered_set> // unordered set/multiset #include <unordered_map> // unordered map/multimap #include <iterator> #include <algorithm> #include <numeric> // some numeric algorithm #include <functional> #include <stack> #include <sstream> #include <memory> #include <iostream> #include <stdexcept> namespace dsa { template <typename T> class ListStack : public Stack<T> { private: std::list<T> list_; public: // Create an empty stack ListStack(int id) : Stack<T>(id) {} // Create a Stack with an initial element ListStack(int id, T firstElem) : Stack<T>(id) { push(firstElem); } ~ListStack() { list_.clear(); } // Iterator class can be used to sequentially access nodes of stack class Iterator { public: Iterator() noexcept : it_ (list_.begin()) { } Iterator& operator=(typename std::list<T>::iterator& it) { this->it_ = it; return *this; } // Prefix ++ overload Iterator& operator++() { if (it_) it_++; return *this; } // Postfix ++ overload Iterator operator++(int) { Iterator iterator = *this; ++*this; return iterator; } bool operator!=(const Iterator& iterator) { return it_ != iterator.it_; } T operator*() { return *it_; } private: const typename std::list<T>::iterator it_; }; // Return the number of elements in the stack int size() override { return list_.size(); } // Clear the stack void clear() override { list_.clear(); } // Check if the stack is empty bool isEmpty() override { return size() == 0; } // Push an element on the stack void push(T elem) override { list_.push_back(elem); } // Pop an element off the stack // Throws an error is the stack is empty T pop() override 
{ if (isEmpty()) throw std::runtime_error("Empty Stack"); T f = list_.back(); list_.erase(--list_.end()); return f; } // Peek the top of the stack without removing an element // Throws an exception if the stack is empty T peek() override { if (isEmpty()) throw std::runtime_error("Empty Stack"); return list_.back(); } }; } // namespace dsa #endif /* D_LINKLIST_STACK_H */
1,028
2,980
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.elasticjob.tracing.api; import lombok.Getter; import lombok.RequiredArgsConstructor; import org.apache.shardingsphere.elasticjob.api.JobExtraConfiguration; import org.apache.shardingsphere.elasticjob.tracing.exception.TracingStorageConverterNotFoundException; import org.apache.shardingsphere.elasticjob.tracing.storage.TracingStorageConverterFactory; /** * Tracing configuration. * * @param <T> type of tracing storage */ @Getter @RequiredArgsConstructor public final class TracingConfiguration<T> implements JobExtraConfiguration { private final String type; private final TracingStorageConfiguration<T> tracingStorageConfiguration; @SuppressWarnings("unchecked") public TracingConfiguration(final String type, final T storage) { this.type = type; this.tracingStorageConfiguration = TracingStorageConverterFactory.findConverter((Class<T>) storage.getClass()) .orElseThrow(() -> new TracingStorageConverterNotFoundException(storage.getClass())).convertObjectToConfiguration(storage); } }
548
32,544
package com.baeldung.java9.language.stream;

import org.junit.Test;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static java.lang.Integer.*;
import static org.junit.Assert.assertEquals;

/**
 * Unit tests for the Java 9 Stream API additions:
 * takeWhile/dropWhile, the three-argument overload of iterate, and ofNullable.
 */
public class StreamFeaturesUnitTest {

    public static class TakeAndDropWhileTest {

        // Infinite stream of "", "s", "ss", ... truncated while length < 10.
        public Stream<String> getStreamAfterTakeWhileOperation() {
            return Stream.iterate("", s -> s + "s").takeWhile(s -> s.length() < 10);
        }

        // Same truncated stream, then drops the leading elements shorter than "sssss".
        public Stream<String> getStreamAfterDropWhileOperation() {
            return Stream.iterate("", s -> s + "s").takeWhile(s -> s.length() < 10).dropWhile(s -> !s.contains("sssss"));
        }

        @Test
        public void testTakeWhileOperation() {
            List<String> list = getStreamAfterTakeWhileOperation().collect(Collectors.toList());

            assertEquals(10, list.size());

            assertEquals("", list.get(0));
            assertEquals("ss", list.get(2));
            assertEquals("sssssssss", list.get(list.size() - 1));
        }

        @Test
        public void testDropWhileOperation() {
            List<String> list = getStreamAfterDropWhileOperation().collect(Collectors.toList());

            assertEquals(5, list.size());

            assertEquals("sssss", list.get(0));
            assertEquals("sssssss", list.get(2));
            assertEquals("sssssssss", list.get(list.size() - 1));
        }

    }

    public static class IterateTest {

        // iterate(seed, hasNext, next): bounded iteration without takeWhile.
        private Stream<Integer> getStream() {
            return Stream.iterate(0, i -> i < 10, i -> i + 1);
        }

        @Test
        public void testIterateOperation() {
            List<Integer> list = getStream().collect(Collectors.toList());

            assertEquals(10, list.size());

            assertEquals(valueOf(0), list.get(0));
            assertEquals(valueOf(5), list.get(5));
            assertEquals(valueOf(9), list.get(list.size() - 1));
        }

    }

    public static class OfNullableTest {

        private final List<String> collection = Arrays.asList("A", "B", "C");

        // "B" is deliberately absent so map.get("B") yields null.
        // Replaces the original double-brace initialization, which created an
        // anonymous HashMap subclass holding a hidden enclosing-instance
        // reference; Map.of is immutable and sufficient since the map is only read.
        private final Map<String, Integer> map = Map.of("A", 10, "C", 30);

        // flatMap + ofNullable drops null lookups in one step.
        private Stream<Integer> getStreamWithOfNullable() {
            return collection.stream().flatMap(s -> Stream.ofNullable(map.get(s)));
        }

        // Pre-Java-9 equivalent: explicit null check producing an empty stream.
        private Stream<Integer> getStream() {
            return collection.stream().flatMap(s -> {
                Integer temp = map.get(s);
                return temp != null ? Stream.of(temp) : Stream.empty();
            });
        }

        // Asserts the expected two-element result and returns it for comparison.
        private List<Integer> testOfNullableFrom(Stream<Integer> stream) {
            List<Integer> list = stream.collect(Collectors.toList());

            assertEquals(2, list.size());

            assertEquals(valueOf(10), list.get(0));
            assertEquals(valueOf(30), list.get(list.size() - 1));

            return list;
        }

        @Test
        public void testOfNullable() {
            // Both formulations must produce identical results.
            assertEquals(testOfNullableFrom(getStream()), testOfNullableFrom(getStreamWithOfNullable()));
        }

    }

}
1,449
474
package org.javacord.api.entity.channel;

import org.javacord.api.entity.channel.internal.GroupChannelUpdaterDelegate;
import org.javacord.api.util.internal.DelegateFactory;

import java.util.concurrent.CompletableFuture;

/**
 * This class can be used to update group channels.
 *
 * <p>Setters only queue changes locally on the delegate; nothing is sent
 * until {@link #update()} is called. Setters return {@code this} so calls
 * can be chained fluently.
 */
public class GroupChannelUpdater {

    /**
     * The group channel delegate used by this instance.
     */
    private final GroupChannelUpdaterDelegate delegate;

    /**
     * Creates a new group channel updater.
     *
     * @param channel The channel to update.
     */
    public GroupChannelUpdater(GroupChannel channel) {
        // All real work is performed by the implementation-side delegate.
        delegate = DelegateFactory.createGroupChannelUpdaterDelegate(channel);
    }

    /**
     * Queues the name to be updated.
     *
     * @param name The new name of the channel.
     * @return The current instance in order to chain call methods.
     */
    public GroupChannelUpdater setName(String name) {
        delegate.setName(name);
        return this;
    }

    /**
     * Performs the queued updates.
     *
     * @return A future to check if the update was successful.
     */
    public CompletableFuture<Void> update() {
        return delegate.update();
    }

}
432
3,269
// Time: O(m * n * log(m * n))
// Space: O(m * n)

// Dijkstra algorithm solution.
// Each cell is a node; the weight of an edge between neighbors is the absolute
// height difference. The "effort" of a path is the MAXIMUM edge weight along
// it, so this is a minimax shortest path: relax with max(d, edge), not d + edge.
class Solution {
public:
    int minimumEffortPath(vector<vector<int>>& heights) {
        // 4-directional neighborhood.
        static const vector<pair<int, int>> directions{{0, 1}, {1, 0}, {0, -1}, {-1, 0}};
        using T = tuple<int, int, int>;  // (effort, row, col)
        // dist[r][c]: smallest effort found so far to reach (r, c).
        vector<vector<int>> dist(size(heights), vector<int>(size(heights[0]), numeric_limits<int>::max()));
        dist[0][0] = 0;
        priority_queue<T, vector<T>, greater<T>> min_heap;  // min-heap keyed on effort
        min_heap.emplace(0, 0, 0);
        // lookup[r][c]: cell already finalized (popped with its optimal effort).
        vector<vector<int>> lookup(size(heights), vector<int>(size(heights[0])));
        while (!empty(min_heap)) {
            const auto [d, r, c] = min_heap.top(); min_heap.pop();
            if (lookup[r][c]) {
                continue;  // stale heap entry
            }
            lookup[r][c] = true;
            if (r == size(heights) - 1 && c == size(heights[0]) - 1) {
                return d;  // destination finalized; d is the minimum effort
            }
            for (const auto& [dr, dc] : directions) {
                int nr = r + dr, nc = c + dc;
                if (!(0 <= nr && nr < size(heights) &&
                      0 <= nc && nc < size(heights[0]) &&
                      !lookup[nr][nc])) {
                    continue;
                }
                // Effort through (r, c): max of current effort and this step.
                int nd = max(d, abs(heights[nr][nc] - heights[r][c]));
                if (nd < dist[nr][nc]) {
                    dist[nr][nc] = nd;
                    min_heap.emplace(nd, nr, nc);
                }
            }
        }
        return -1;  // not reached for a non-empty grid
    }
};

// Time: O(m * n * log(m * n) + m * n * α(m * n)) = O(m * n * log(m * n))
// Space: O(m * n)

// union find solution.
// Sort all grid edges by height difference, then add them one at a time;
// the weight of the edge that first connects start to end is the answer.
class Solution2 {
public:
    int minimumEffortPath(vector<vector<int>>& heights) {
        // (weight, flat index a, flat index b) for every horizontal/vertical edge.
        vector<tuple<int, int, int>> diffs;
        for (int i = 0; i < size(heights); ++i) {
            for (int j = 0; j < size(heights[0]); ++j) {
                if (i > 0) {
                    diffs.emplace_back(abs(heights[i][j] - heights[i - 1][j]),
                                       index(size(heights[0]), i - 1, j),
                                       index(size(heights[0]), i, j));
                }
                if (j > 0) {
                    diffs.emplace_back(abs(heights[i][j] - heights[i][j - 1]),
                                       index(size(heights[0]), i, j - 1),
                                       index(size(heights[0]), i, j));
                }
            }
        }
        sort(begin(diffs), end(diffs));  // ascending by weight
        UnionFind union_find(size(heights) * size(heights[0]));
        for (const auto& [d, i, j] : diffs) {
            if (union_find.union_set(i, j)) {
                // Check connectivity only when a union actually merged two sets.
                if (union_find.find_set(index(size(heights[0]), 0, 0)) ==
                    union_find.find_set(index(size(heights[0]), size(heights) - 1, size(heights[0]) - 1))) {
                    return d;
                }
            }
        }
        return 0;  // start == end already (e.g. 1x1 grid): zero effort
    }

private:
    // Disjoint-set with union by rank and path compression.
    class UnionFind {
    public:
        UnionFind(const int n)
         : set_(n)
         , rank_(n)
         , count_(n) {
            iota(set_.begin(), set_.end(), 0);
        }

        int find_set(const int x) {
            if (set_[x] != x) {
                set_[x] = find_set(set_[x]);  // Path compression.
            }
            return set_[x];
        }

        bool union_set(const int x, const int y) {
            int x_root = find_set(x), y_root = find_set(y);
            if (x_root == y_root) {
                return false;
            }
            if (rank_[x_root] < rank_[y_root]) {  // Union by rank.
                set_[x_root] = y_root;
            } else if (rank_[x_root] > rank_[y_root]) {
                set_[y_root] = x_root;
            } else {
                set_[y_root] = x_root;
                ++rank_[x_root];
            }
            --count_;
            return true;
        }

        int size() const {
            return count_;
        }

    private:
        vector<int> set_;
        vector<int> rank_;
        int count_;
    };

    // Flatten (i, j) into one index for a grid with n columns.
    int index(int n, int i, int j) {
        return i * n + j;
    }
};

// Time: O(m * n * logh)
// Space: O(m * n)

// bi-bfs solution.
// Binary-search the answer x over possible efforts; feasibility of x is
// tested with a bidirectional BFS that only crosses edges of weight <= x.
class Solution3 {
public:
    int minimumEffortPath(vector<vector<int>>& heights) {
        static const int MAX_H = 1e6;  // heights bounded by 10^6

        int left = 0, right = MAX_H;
        while (left <= right) {
            int mid = left + (right - left) / 2;
            if (check(heights, mid)) {
                right = mid - 1;  // feasible: try a smaller effort
            } else {
                left = mid + 1;
            }
        }
        return left;  // smallest feasible effort
    }

private:
    // True iff start and end are connected using only edges of weight <= x.
    bool check(const vector<vector<int>>& heights, int x) {
        static const vector<pair<int, int>> directions{{0, 1}, {1, 0}, {0, -1}, {-1, 0}};
        vector<vector<int>> lookup(size(heights), vector<int>(size(heights[0])));
        // Frontiers grown from both endpoints; always expand the smaller one.
        unordered_set<pair<int, int>, PairHash<int>> left({{0, 0}});
        unordered_set<pair<int, int>, PairHash<int>> right({{size(heights) - 1, size(heights[0]) - 1}});
        while (!empty(left)) {
            for (const auto& [r, c] : left) {
                lookup[r][c] = true;  // mark current frontier as visited
            }
            unordered_set<pair<int, int>, PairHash<int>> new_left;
            for (const auto& [r, c] : left) {
                if (right.count(pair(r, c))) {
                    return true;  // frontiers met
                }
                for (const auto& [dr, dc] : directions) {
                    int nr = r + dr, nc = c + dc;
                    if (!(0 <= nr && nr < size(heights) &&
                          0 <= nc && nc < size(heights[0]) &&
                          abs(heights[nr][nc] - heights[r][c]) <= x &&
                          !lookup[nr][nc])) {
                        continue;
                    }
                    new_left.emplace(nr, nc);
                }
            }
            left = move(new_left);
            if (size(left) > size(right)) {
                swap(left, right);  // keep expanding the smaller frontier
            }
        }
        return false;
    }

    // Hash combiner for pair keys in the unordered_sets above.
    template <typename T>
    struct PairHash {
        size_t operator()(const pair<T, T>& p) const {
            size_t seed = 0;
            seed ^= std::hash<T>{}(p.first)  + 0x9e3779b9 + (seed<<6) + (seed>>2);
            seed ^= std::hash<T>{}(p.second) + 0x9e3779b9 + (seed<<6) + (seed>>2);
            return seed;
        }
    };
};

// Time: O(m * n * logh)
// Space: O(m * n)

// bfs solution.
// Same binary search on the answer, feasibility tested with a plain BFS.
class Solution4 {
public:
    int minimumEffortPath(vector<vector<int>>& heights) {
        static const int MAX_H = 1e6;

        int left = 0, right = MAX_H;
        while (left <= right) {
            int mid = left + (right - left) / 2;
            if (check(heights, mid)) {
                right = mid - 1;
            } else {
                left = mid + 1;
            }
        }
        return left;
    }

private:
    // BFS from (0, 0) across edges of weight <= x; true if the end is reached.
    bool check(const vector<vector<int>>& heights, int x) {
        static const vector<pair<int, int>> directions{{0, 1}, {1, 0}, {0, -1}, {-1, 0}};
        queue<pair<int, int>> q({{0, 0}});
        vector<vector<int>> lookup(size(heights), vector<int>(size(heights[0])));
        while (!empty(q)) {
            const auto [r, c] = q.front(); q.pop();
            if (r == size(heights) - 1 && c == size(heights[0]) - 1) {
                return true;
            }
            for (const auto& [dr, dc] : directions) {
                int nr = r + dr, nc = c + dc;
                if (!(0 <= nr && nr < size(heights) &&
                      0 <= nc && nc < size(heights[0]) &&
                      abs(heights[nr][nc] - heights[r][c]) <= x &&
                      !lookup[nr][nc])) {
                    continue;
                }
                lookup[nr][nc] = true;
                q.emplace(nr, nc);
            }
        }
        return false;
    }

    // NOTE(review): unused in this class; kept from the shared template.
    template <typename T>
    struct PairHash {
        size_t operator()(const pair<T, T>& p) const {
            size_t seed = 0;
            seed ^= std::hash<T>{}(p.first)  + 0x9e3779b9 + (seed<<6) + (seed>>2);
            seed ^= std::hash<T>{}(p.second) + 0x9e3779b9 + (seed<<6) + (seed>>2);
            return seed;
        }
    };
};

// Time: O(m * n * logh)
// Space: O(m * n)

// dfs solution.
// Same binary search on the answer, feasibility tested with an iterative DFS.
class Solution5 {
public:
    int minimumEffortPath(vector<vector<int>>& heights) {
        static const int MAX_H = 1e6;

        int left = 0, right = MAX_H;
        while (left <= right) {
            int mid = left + (right - left) / 2;
            if (check(heights, mid)) {
                right = mid - 1;
            } else {
                left = mid + 1;
            }
        }
        return left;
    }

private:
    // DFS (explicit stack) from (0, 0) across edges of weight <= x.
    bool check(const vector<vector<int>>& heights, int x) {
        static const vector<pair<int, int>> directions{{0, 1}, {1, 0}, {0, -1}, {-1, 0}};
        vector<pair<int, int>> stk({{0, 0}});
        vector<vector<int>> lookup(size(heights), vector<int>(size(heights[0])));
        while (!empty(stk)) {
            const auto [r, c] = stk.back(); stk.pop_back();
            if (r == size(heights) - 1 && c == size(heights[0]) - 1) {
                return true;
            }
            for (const auto& [dr, dc] : directions) {
                int nr = r + dr, nc = c + dc;
                if (!(0 <= nr && nr < size(heights) &&
                      0 <= nc && nc < size(heights[0]) &&
                      abs(heights[nr][nc] - heights[r][c]) <= x &&
                      !lookup[nr][nc])) {
                    continue;
                }
                lookup[nr][nc] = true;
                stk.emplace_back(nr, nc);
            }
        }
        return false;
    }

    // NOTE(review): unused in this class; kept from the shared template.
    template <typename T>
    struct PairHash {
        size_t operator()(const pair<T, T>& p) const {
            size_t seed = 0;
            seed ^= std::hash<T>{}(p.first)  + 0x9e3779b9 + (seed<<6) + (seed>>2);
            seed ^= std::hash<T>{}(p.second) + 0x9e3779b9 + (seed<<6) + (seed>>2);
            return seed;
        }
    };
};
5,966
700
#define BLKDEV_BASE 0x10015000 #define BLKDEV_ADDR BLKDEV_BASE #define BLKDEV_OFFSET (BLKDEV_BASE + 8) #define BLKDEV_LEN (BLKDEV_BASE + 12) #define BLKDEV_WRITE (BLKDEV_BASE + 16) #define BLKDEV_REQUEST (BLKDEV_BASE + 17) #define BLKDEV_NREQUEST (BLKDEV_BASE + 18) #define BLKDEV_COMPLETE (BLKDEV_BASE + 19) #define BLKDEV_NCOMPLETE (BLKDEV_BASE + 20) #define BLKDEV_NSECTORS (BLKDEV_BASE + 24) #define BLKDEV_MAX_REQUEST_LENGTH (BLKDEV_BASE + 28) #define BLKDEV_SECTOR_SIZE 512 #define BLKDEV_SECTOR_SHIFT 9 static inline size_t blkdev_nsectors(void) { return reg_read32(BLKDEV_NSECTORS); } static inline size_t blkdev_max_req_len(void) { return reg_read32(BLKDEV_MAX_REQUEST_LENGTH); } static inline unsigned int blkdev_send_request( unsigned long addr, unsigned int offset, unsigned int len, unsigned char write) { reg_write64(BLKDEV_ADDR, addr); reg_write32(BLKDEV_OFFSET, offset); reg_write32(BLKDEV_LEN, len); reg_write8(BLKDEV_WRITE, write); asm volatile ("fence"); return reg_read8(BLKDEV_REQUEST); }
456
5,250
<filename>modules/flowable-variable-service/src/main/java/org/flowable/variable/service/impl/types/TraceableVariablesCommandContextCloseListener.java
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.variable.service.impl.types;

import org.flowable.common.engine.impl.db.DbSqlSession;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.common.engine.impl.interceptor.CommandContextCloseListener;

/**
 * A {@link CommandContextCloseListener} that holds one {@link TraceableObject} instance that is added by {@link MutableVariableType}(s).
 *
 * On the {@link #closing(CommandContext)} of the {@link CommandContext}, the {@link TraceableObject} will be verified if it is dirty.
 * If so, it will update the right entities such that changes will be flushed.
 *
 * It's important that this happens in the {@link #closing(CommandContext)}, as this happens before the {@link CommandContext#close()} is called
 * and when all the sessions are flushed (including the {@link DbSqlSession} in the relational DB case (the data needs to be ready then).
 *
 * @author <NAME>
 * @author <NAME>
 */
public class TraceableVariablesCommandContextCloseListener implements CommandContextCloseListener {

    // The single traced object this listener checks for changes on close.
    protected TraceableObject<?, ?> traceableObject;

    public TraceableVariablesCommandContextCloseListener(TraceableObject<?, ?> traceableObject) {
        this.traceableObject = traceableObject;
    }

    @Override
    public void closing(CommandContext commandContext) {
        // Flush the traced value back to its entity if it was mutated.
        traceableObject.updateIfValueChanged();
    }

    // The remaining lifecycle callbacks are intentionally no-ops: this
    // listener only needs to act before the sessions are flushed.
    @Override
    public void closed(CommandContext commandContext) {

    }

    @Override
    public void afterSessionsFlush(CommandContext commandContext) {

    }

    @Override
    public void closeFailure(CommandContext commandContext) {

    }

    @Override
    public Integer order() {
        // Runs early relative to other close listeners.
        return 1;
    }

    @Override
    public boolean multipleAllowed() {
        // One listener is registered per traceable object, so duplicates
        // of this listener type are expected on the same command context.
        return true;
    }

}
571
//
//  EmptyDataSet_Swift.h
//  EmptyDataSet-Swift
//
//  Created by <NAME> on 04/07/2019.
//  Copyright © 2019 Xiaoye. All rights reserved.
//

// Umbrella header for the EmptyDataSet_Swift framework: exposes the
// framework's version symbols and is the place to import any public headers.

#import <UIKit/UIKit.h>

//! Project version number for EmptyDataSet_Swift.
FOUNDATION_EXPORT double EmptyDataSet_SwiftVersionNumber;

//! Project version string for EmptyDataSet_Swift.
FOUNDATION_EXPORT const unsigned char EmptyDataSet_SwiftVersionString[];

// In this header, you should import all the public headers of your framework using statements like #import <EmptyDataSet_Swift/PublicHeader.h>
171
4,224
/****************************************************************************
 *
 *   Copyright (c) 2012-2016 PX4 Development Team. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 * 3. Neither the name PX4 nor the names of its contributors may be
 *    used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 ****************************************************************************/

/**
 * @file top.c
 * Tool similar to UNIX top command
 * @see http://en.wikipedia.org/wiki/Top_unix
 *
 * @author <NAME> <<EMAIL>>
 */

#include <px4_platform_common/px4_config.h>

#include <stdio.h>
#include <fcntl.h>
#include <stdbool.h>
#include <unistd.h>
#include <string.h>
#include <poll.h>

#include <px4_platform/cpuload.h>
#include <px4_platform_common/printload.h>
#include <drivers/drv_hrt.h>
#include <px4_platform_common/module.h>

// Prints usage help for the `top` command.
static void print_usage()
{
	PRINT_MODULE_DESCRIPTION("Monitor running processes and their CPU, stack usage, priority and state");

	PRINT_MODULE_USAGE_NAME_SIMPLE("top", "command");
	PRINT_MODULE_USAGE_COMMAND_DESCR("once", "print load only once");
}

/**
 * Entry point. With no arguments, refreshes the load display about once per
 * second until the user presses q/c/ESC/Ctrl-C; with "once", prints a single
 * snapshot and exits.
 */
extern "C" __EXPORT int top_main(int argc, char *argv[])
{
	print_load_s load{};
	init_print_load(&load);

	/* Let the load counters accumulate briefly before the first print. */
	px4_usleep(200000);

	/* clear screen */
	dprintf(1, "\033[2J\n");

	if (argc > 1) {
		if (!strcmp(argv[1], "once")) {
			px4_sleep(1);
			print_load(STDOUT_FILENO, &load);

		} else {
			print_usage();
		}

		cpuload_monitor_stop();
		return 0;
	}

	for (;;) {
		print_load(STDOUT_FILENO, &load);

		/* Sleep 200 ms waiting for user input five times ~ 1s */
		for (int k = 0; k < 5; k++) {
			char c;

			struct pollfd fds;
			int ret;
			fds.fd = 0; /* stdin */
			fds.events = POLLIN;
			ret = poll(&fds, 1, 0);

			if (ret > 0) {
				ret = read(0, &c, 1);

				/* BUGFIX: the original returned 1 whenever a byte WAS read
				 * (`if (ret)`), which made the key handling below unreachable
				 * and left `c` uninitialized on the EOF path. Treat only a
				 * failed/empty read as an error. */
				if (ret <= 0) {
					cpuload_monitor_stop();
					return 1;
				}

				switch (c) {
				case 0x03: // ctrl-c
				case 0x1b: // esc
				case 'c':
				case 'q':
					cpuload_monitor_stop();
					return 0;
					/* not reached */
				}
			}

			px4_usleep(200000);
		}
	}

	cpuload_monitor_stop();
	return 0;
}
371
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import subprocess

from charmhelpers.core import host
from charmhelpers.core.hookenv import (open_port, close_port, log,
                                       unit_private_ip, local_unit, config)
from charms import layer
from charms.layer.apache_bigtop_base import Bigtop
from charms.reactive.relations import RelationBase
from jujubigdata.utils import DistConfig


def format_node(unit, node_ip):
    '''
    Given a juju unit name and an ip address, return a tuple containing an
    id and formatted ip string suitable for passing to puppet, which will
    write it out to zoo.cfg.

    '''
    # The numeric part of the unit name (after "/") becomes the zk id.
    return (unit.split("/")[1], "{ip}:2888:3888".format(ip=node_ip))


class Zookeeper(object):
    '''
    Utility class for managing Zookeeper tasks like configuration, start,
    stop, and adding and removing nodes.

    '''
    def __init__(self, dist_config=None):
        self._dist_config = dist_config or DistConfig(
            data=layer.options('apache-bigtop-base'))

        self._roles = ['zookeeper-server', 'zookeeper-client']
        self._hosts = {}

    def is_zk_leader(self):
        '''
        Attempt to determine whether this node is the Zookeeper leader.

        Note that Zookeeper tracks leadership independently of juju, and
        that this command can fail, depending on the state that the
        Zookeeper node is in when we attempt to run it.

        '''
        try:
            status = subprocess.check_output(
                ["/usr/lib/zookeeper/bin/zkServer.sh", "status"])
            return "leader" in status.decode('utf-8')
        except Exception:
            # Deliberately broad: zkServer.sh may be missing, hang up, or
            # exit nonzero while the service is settling; treat all of
            # those as "not the leader" rather than failing the hook.
            log(
                "Unable to determine whether this node is the Zookeeper leader.",
                level="WARN"
            )
            return False

    def read_peers(self):
        '''
        Fetch the list of peers available.

        The first item in this list should always be the node that this
        code is executing on.

        '''
        # A Zookeeper node likes to be first on the list.
        nodes = [(local_unit(), unit_private_ip())]

        # Get the list of peers
        zkpeer = RelationBase.from_state('zkpeer.joined')
        if zkpeer:
            nodes.extend(sorted(zkpeer.get_nodes()))

        nodes = [format_node(*node) for node in nodes]
        return nodes

    def sort_peers(self, zkpeer):
        '''
        Return peers, sorted in an order suitable for performing a rolling
        restart (the current zk leader is placed last).

        '''
        peers = self.read_peers()
        leader = zkpeer.find_zk_leader()
        peers.sort(key=lambda x: x[1] == leader)
        return peers

    @property
    def dist_config(self):
        '''
        Charm level config.

        '''
        return self._dist_config

    @property
    def _override(self):
        '''
        Return a dict of keys and values that will override puppet's
        defaults.

        '''
        override = {
            "hadoop_zookeeper::server::myid": local_unit().split("/")[1],
            "hadoop_zookeeper::server::ensemble": self.read_peers()
        }
        # Optional charm config values are passed through to puppet only
        # when the operator has actually set them.
        conf = config()
        network_interface = conf.get('network_interface')
        autopurge_purge_interval = conf.get('autopurge_purge_interval')
        autopurge_snap_retain_count = conf.get('autopurge_snap_retain_count')
        if network_interface:
            key = "hadoop_zookeeper::server::client_bind_addr"
            override[key] = Bigtop().get_ip_for_interface(network_interface)
        if autopurge_purge_interval:
            key = "hadoop_zookeeper::server::autopurge_purge_interval"
            override[key] = autopurge_purge_interval
        if autopurge_snap_retain_count:
            key = "hadoop_zookeeper::server::autopurge_snap_retain_count"
            override[key] = autopurge_snap_retain_count

        return override

    def install(self, nodes=None):
        '''
        Write out the config, then run puppet.

        After this runs, we should have a configured and running service.

        '''
        bigtop = Bigtop()
        # BUGFIX: removed a stray '' that was embedded in the log message.
        log("Rendering site yaml with overrides: {}".format(self._override))
        bigtop.render_site_yaml(self._hosts, self._roles, self._override)
        bigtop.trigger_puppet()
        if self.is_zk_leader():
            zkpeer = RelationBase.from_state('zkpeer.joined')
            zkpeer.set_zk_leader()

    def start(self):
        '''
        Request that our service start. Normally, puppet will handle this
        for us.

        '''
        host.service_start('zookeeper-server')

    def stop(self):
        '''
        Stop Zookeeper.

        '''
        host.service_stop('zookeeper-server')

    def open_ports(self):
        '''
        Expose the ports in the configuration to the outside world.

        '''
        for port in self.dist_config.exposed_ports('zookeeper'):
            open_port(port)

    def close_ports(self):
        '''
        Close off communication from the outside world.

        '''
        for port in self.dist_config.exposed_ports('zookeeper'):
            close_port(port)

    def quorum_check(self):
        '''
        Returns a string reporting the node count. Append a message
        informing the user if the node count is too low for good quorum,
        or is even (meaning that one of the nodes is redundant for quorum).

        '''
        node_count = len(self.read_peers())
        if node_count == 1:
            count_str = "{} unit".format(node_count)
        else:
            count_str = "{} units".format(node_count)

        if node_count < 3:
            return " ({}; less than 3 is suboptimal)".format(count_str)
        if node_count % 2 == 0:
            return " ({}; an even number is suboptimal)".format(count_str)
        # NOTE(review): unlike the branches above, this string has no
        # leading space -- confirm the asymmetry is intended by callers.
        return "({})".format(count_str)
624
package com.pinterest.doctork.security;

import javax.ws.rs.container.ContainerRequestFilter;

import com.pinterest.doctork.config.DoctorKConfig;

/**
 * This extends JAX-RS container request filter for authorization.
 *
 * <p>Please refer to
 * https://docs.oracle.com/javaee/7/api/javax/ws/rs/container/ContainerRequestFilter.html
 * for more details on how {@link ContainerRequestFilter} works.
 */
public interface DoctorKAuthorizationFilter extends ContainerRequestFilter {

  /**
   * Initializes the filter from the application configuration before it is
   * registered with the JAX-RS runtime.
   *
   * @param config the DoctorK application configuration to read authorization
   *        settings from
   * @throws Exception if the filter cannot be configured (implementations may
   *         throw narrower types)
   */
  void configure(DoctorKConfig config) throws Exception;
}
155
435
# -*- coding: utf-8 -*-
"""Integration tests for petl's Google Sheets I/O (fromgsheet / togsheet /
appendgsheet).

These tests talk to the real Google Sheets API and are skipped unless
service-account credentials are discoverable (see found_gcp_credentials).
"""

from __future__ import absolute_import, print_function, division

import datetime
import os
import json
import time

import pytest

from petl.compat import text_type
from petl.io.gsheet import fromgsheet, togsheet, appendgsheet
from petl.test.helpers import ieq, get_env_vars_named

# Both are optional dependencies; skip the whole module if unavailable.
gspread = pytest.importorskip("gspread")
uuid = pytest.importorskip("uuid")

# region helpers


def _get_gspread_credentials():
    # Credential discovery order:
    # 1. explicit json file path in PETL_GCP_JSON_PATH
    # 2. individual properties in PETL_GCP_CREDS_* env vars (returned as dict)
    # 3. gspread's default user config location
    json_path = os.getenv("PETL_GCP_JSON_PATH", None)
    if json_path is not None and os.path.exists(json_path):
        return json_path
    json_props = get_env_vars_named("PETL_GCP_CREDS_")
    if json_props is not None:
        return json_props
    user_path = os.path.expanduser("~/.config/gspread/service_account.json")
    if os.path.isfile(user_path) and os.path.exists(user_path):
        return user_path
    return None


# Marker that skips a test when no credentials were found.
found_gcp_credentials = pytest.mark.skipif(
    _get_gspread_credentials() is None,
    reason="""SKIPPED. to/from gspread needs json credentials for testing.
In order to run google spreadsheet tests, follow the steps bellow: 
1. Create a json authorization file, following the steps described at
   http://gspread.readthedocs.io/en/latest/oauth2.html, and save to a local path
2. Point the envvar `PETL_GCP_JSON_PATH` to the json authorization file path
2. Or fill the properties inside the json authorization file in envrionment
   variables named with prefix PETL_GCP_CREDS_: PETL_GCP_CREDS_project_id=petl
3. Or else save the file in one of the following paths:
   unix: ~/.config/gspread/service_account.json
   windows: %APPDATA%\\gspread\\service_account.json"""
)


def _get_env_credentials():
    # Normalize whatever _get_gspread_credentials found into a dict
    # (loading the json file when a path was returned), or None.
    creds = _get_gspread_credentials()
    if isinstance(creds, dict):
        return creds
    if isinstance(creds, text_type):
        with open(creds, encoding="utf-8") as json_file:
            creds = json.load(json_file)
        return creds
    return None


def _get_gspread_client():
    # Build an authenticated gspread client, skipping the test on API
    # authentication errors rather than failing it.
    credentials = _get_env_credentials()
    try:
        if credentials is None:
            gspread_client = gspread.service_account()
        else:
            gspread_client = gspread.service_account_from_dict(credentials)
    except gspread.exceptions.APIError as ex:
        pytest.skip("SKIPPED. to/from gspread authentication error: %s" % ex)
        return None
    return gspread_client


def _get_env_sharing_emails():
    # Optional list of emails to share created spreadsheets with, taken from
    # PETL_GSHEET_EMAIL* environment variables.
    emails = get_env_vars_named("PETL_GSHEET_EMAIL", remove_prefix=False)
    if emails is not None:
        return list(emails.values())
    return []


def _get_gspread_test_params():
    # Fresh unique spreadsheet name per test run to avoid collisions.
    filename = "test-{}".format(str(uuid.uuid4()))
    gspread_client = _get_gspread_client()
    emails = _get_env_sharing_emails()
    return filename, gspread_client, emails


def _test_to_fromg_sheet(table, sheetname, cell_range, expected):
    # Round-trip helper: write `table` with togsheet, read it back with
    # fromgsheet, and compare against `expected`; always deletes the sheet.
    filename, gspread_client, emails = _get_gspread_test_params()
    # test to from gsheet
    spread_id = togsheet(
        table, gspread_client, filename, worksheet=sheetname,
        share_emails=emails
    )
    try:
        result = fromgsheet(
            gspread_client, filename, worksheet=sheetname,
            cell_range=cell_range
        )
        # make sure the expected_result matches the result
        ieq(expected, result)
    finally:
        # clean up created table
        gspread_client.del_spreadsheet(spread_id)


def _test_append_from_gsheet(table_list, expected, sheetname=None):
    # Append helper: create a sheet from the first table, append the rest
    # with appendgsheet, then read everything back and compare.
    filename, gspread_client, emails = _get_gspread_test_params()
    # append from the second table from the list
    table1 = table_list[0]
    other_tables = table_list[1:]
    # create the spreadshteet and the 1st sheet
    spread_id = togsheet(
        table1, gspread_client, filename, worksheet=sheetname,
        share_emails=emails
    )
    try:
        for tableN in other_tables:
            appendgsheet(
                tableN, gspread_client, spread_id, worksheet=sheetname,
                open_by_key=True
            )
        # read the result appended to the sheet
        result = fromgsheet(
            gspread_client, spread_id, worksheet=sheetname, open_by_key=True
        )
        # make sure the expected_result matches the result
        ieq(expected, result)
    finally:
        # clean up created table
        gspread_client.del_spreadsheet(spread_id)


def teardown_function():
    # try to avoid: User rate limit exceeded.
    time.sleep(3)

# endregion

# region test cases data


TEST_TABLE = [
    ["foo", "bar"],
    ["A", "1"],
    ["B", "2"],
    ["C", "3"],
    ["D", "random_stuff-in+_名字"],
    ["é", "3/4/2012"],
    ["F", "6"],
]

# endregion

# region test cases execution


@found_gcp_credentials
def test_tofromgsheet_01_basic():
    _test_to_fromg_sheet(
        TEST_TABLE[:], None, None, TEST_TABLE[:]
    )


@found_gcp_credentials
def test_tofromgsheet_02_uneven_row():
    # A row with an extra cell comes back padded with "" in the other rows.
    test_table_t1 = [x + ["3"] if i in [2] else x
                     for i, x in enumerate(TEST_TABLE[:])]
    test_table_f1 = [x + [""] if len(x) < 3 else x for x in test_table_t1[:]]
    _test_to_fromg_sheet(
        test_table_t1, None, None, test_table_f1
    )


@found_gcp_credentials
def test_tofromgsheet_03_empty_table():
    _test_to_fromg_sheet(
        (), None, None, ()
    )


@found_gcp_credentials
def test_tofromgsheet_04_cell_range():
    # Reading only B1:B4 returns just the second column of the first 4 rows.
    test_table_f2 = [[x[1]] for x in TEST_TABLE[0:4]]
    _test_to_fromg_sheet(
        TEST_TABLE[:], None, "B1:B4", test_table_f2
    )


@found_gcp_credentials
def test_tofromgsheet_05_sheet_title():
    _test_to_fromg_sheet(
        TEST_TABLE[:], "random_stuff-in+_名字", None, TEST_TABLE[:]
    )


@found_gcp_credentials
@pytest.mark.xfail(
    raises=TypeError,
    reason="When this stop failing, uncomment datetime.date in TEST1 and TEST2"
)
def test_tofromgsheet_06_datetime_date():
    # datetime.date values are not yet serializable by togsheet (xfail).
    test_table_dt = [[x[0], datetime.date(2012, 5, 6)] if i in [5] else x
                     for i, x in enumerate(TEST_TABLE[:])]
    _test_to_fromg_sheet(
        test_table_dt[:], None, "B1:B4", test_table_dt[:]
    )


@found_gcp_credentials
def test_tofromgsheet_07_open_by_key():
    filename, gspread_client, emails = _get_gspread_test_params()
    # test to from gsheet
    table = TEST_TABLE[:]
    # test to from gsheet
    spread_id = togsheet(table, gspread_client, filename, share_emails=emails)
    try:
        result = fromgsheet(gspread_client, spread_id, open_by_key=True)
        # make sure the expected_result matches the result
        ieq(table, result)
    finally:
        # clean up created table
        gspread_client.del_spreadsheet(spread_id)


@found_gcp_credentials
def test_tofromgsheet_08_recreate():
    # Writing a second spreadsheet with the same title creates a distinct
    # spreadsheet; both must round-trip independently.
    filename, gspread_client, emails = _get_gspread_test_params()
    # test to from gsheet
    table1 = TEST_TABLE[:]
    table2 = [[x[0], text_type(i)] if i > 0 else x
              for i, x in enumerate(table1)]
    # test to from gsheet
    spread_id = togsheet(table1, gspread_client, filename, share_emails=emails)
    try:
        result1 = fromgsheet(gspread_client, spread_id, open_by_key=True)
        ieq(table1, result1)
        spread_id2 = togsheet(table2, gspread_client, filename,
                              share_emails=emails)
        try:
            result2 = fromgsheet(gspread_client, spread_id2, open_by_key=True)
            ieq(table2, result2)
        finally:
            gspread_client.del_spreadsheet(spread_id2)
        # make sure the expected_result matches the result
    finally:
        # clean up created table
        gspread_client.del_spreadsheet(spread_id)


def _get_testcase_for_append():
    # Appending a table to itself: header appears once, data rows twice.
    table_list = [TEST_TABLE[:], TEST_TABLE[:]]
    expected = TEST_TABLE[:] + TEST_TABLE[1:]
    return table_list, expected


@found_gcp_credentials
def test_appendgsheet_10_double():
    table_list, expected = _get_testcase_for_append()
    _test_append_from_gsheet(table_list, expected)


@found_gcp_credentials
def test_appendgsheet_11_named_sheet():
    table_list, expected = _get_testcase_for_append()
    _test_append_from_gsheet(table_list, expected, sheetname="petl_append")


@found_gcp_credentials
def test_appendgsheet_12_other_sheet():
    # Appending to a worksheet name that does not exist creates it, leaving
    # the default worksheet untouched.
    filename, gspread_client, emails = _get_gspread_test_params()
    # test to append gsheet
    table = TEST_TABLE[:]
    table2 = TEST_TABLE[1:]
    spread_id = togsheet(table, gspread_client, filename, share_emails=emails)
    try:
        appendgsheet(table, gspread_client, filename, worksheet="petl")
        # get the results from the 2 sheets
        result1 = fromgsheet(gspread_client, filename, worksheet=None)
        ieq(result1, table)
        result2 = fromgsheet(gspread_client, filename, worksheet="petl")
        ieq(result2, table2)
    finally:
        gspread_client.del_spreadsheet(spread_id)

# endregion
3,543
488
/* Exercise two nested do-while loops with a skip-ahead rule: each counter
 * is incremented by one per pass and jumps an extra two once it exceeds 5;
 * both loops run while their counter is below 10.  The counters are zeroed
 * again before exiting, so the program has no observable result beyond its
 * exit status of 0. */
int main()
{
    int outer = 0;
    int inner = 0;

    do
    {
        outer += 1;
        if (outer > 5)
        {
            outer += 2;     /* accelerate once past the threshold */
        }

        do
        {
            inner += 1;
            if (inner > 5)
            {
                inner += 2; /* same acceleration for the inner counter */
            }
        }
        while (inner < 10);
    }
    while (outer < 10);

    /* Reset both counters (dead stores kept to mirror the original). */
    inner = 0;
    outer = 0;
    return 0;
}
151
482
/**
 * Copyright (c) 2013, impossibl.com
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *  * Neither the name of impossibl.com nor the names of its contributors may
 *    be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package com.impossibl.postgres.jdbc;

import com.impossibl.postgres.system.Setting;
import com.impossibl.postgres.system.SystemSettings;

/**
 * Declarative registry of JDBC DataSource settings.
 *
 * <p>Each {@code Setting} is declared via annotations that are processed by
 * the {@code Setting.Factory} machinery; the generated
 * {@code DataSourceSettingsInit.init()} call in the static initializer wires
 * the declarations up at class-load time.  The annotation attribute values
 * (names, defaults, ranges, alternate names) ARE the runtime configuration
 * metadata — do not alter them casually.
 */
@Setting.Factory
public class DataSourceSettings {

  /** Group under which all JDBC DataSource settings are registered. */
  @Setting.Group.Info(
      id = "jdbc-ds", desc = "JDBC DataSource Settings", order = 2
  )
  public static final Setting.Group DS = Setting.Group.declare();

  /** Logical name of the data source (alternate name: {@code dataSourceName}). */
  @Setting.Info(
      desc = "Name of data source.",
      name = "data-source.name",
      group = "jdbc-ds",
      alternateNames = {"dataSourceName"}
  )
  public static final Setting<String> DATASOURCE_NAME = Setting.declare();

  /** Database name, re-exported from the system-wide settings into this group. */
  public static final Setting<String> DATABASE_NAME = DS.add(
      SystemSettings.DATABASE_NAME
  );

  /** Server host name for TCP connections (default {@code localhost}). */
  @Setting.Info(
      desc = "Host name for TCP connections.",
      def = "localhost",
      name = "server.name",
      group = "jdbc-ds",
      alternateNames = {"serverName"}
  )
  public static final Setting<String> SERVER_NAME = Setting.declare();

  /** TCP port, constrained to the valid port range 1-65535. */
  @Setting.Info(
      desc = "Port number for TCP connections.",
      def = "5432", min = 1, max = 65535,
      name = "port.number",
      group = "jdbc-ds",
      alternateNames = {"portNumber"}
  )
  public static final Setting<Integer> PORT_NUMBER = Setting.declare();

  /** Unix domain socket path for local (non-TCP) server connections. */
  @Setting.Info(
      desc = "Unix socket name for local server connections.",
      name = "server.local-name",
      group = "jdbc-ds",
      alternateNames = {"localServerName"}
  )
  public static final Setting<String> LOCAL_SERVER_NAME = Setting.declare();

  /**
   * Multi-address failover list; when set it takes precedence over the
   * single-server settings above (see the description text).
   */
  @Setting.Info(
      desc = "Comma separated list of server addresses for which a connection will be attempted in order.\n\n" +
          "Supports providing DNS, IPv4, IPv6 & Unix socket addresses. DNS & IP addresses are specified in " +
          "`host[:port]?` format while Unix socket addresses must contain a `/` to valid. Additionally IPv6 host " +
          "names must be enclosed in `[]`.\n\n" +
          "NOTE: Specifying a list of addresses takes precedence over `server.name`, `port.number` & " +
          "`server.local-name' and will cause those settings to be ignored.",
      name = "server.addresses",
      group = "jdbc-ds",
      alternateNames = {"serverAddresses"}
  )
  public static final Setting<String> SERVER_ADDRESSES = Setting.declare();

  /** Connect timeout in seconds; 0 means wait indefinitely. */
  @Setting.Info(
      desc = "Maximum time to wait for a connection to be established.",
      def = "0", min = 0,
      name = "login.timeout",
      group = "jdbc-ds",
      alternateNames = {"loginTimeout"}
  )
  public static final Setting<Integer> LOGIN_TIMEOUT = Setting.declare();

  static {
    // Generated initializer registers all of the declarations above.
    DataSourceSettingsInit.init();
  }

}
1,402
1,006
/****************************************************************************
 * arch/arm/src/rp2040/rp2040_dmac.c
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.  The
 * ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 ****************************************************************************/

/****************************************************************************
 * Included Files
 ****************************************************************************/

#include <nuttx/config.h>

#include <sys/types.h>
#include <stdint.h>
#include <stdbool.h>
#include <assert.h>
#include <debug.h>
#include <errno.h>

#include <nuttx/irq.h>
#include <nuttx/arch.h>
#include <nuttx/semaphore.h>

#include "arm_arch.h"
#include "hardware/rp2040_dma.h"
#include "rp2040_dmac.h"

/****************************************************************************
 * Pre-processor Definitions
 ****************************************************************************/

/****************************************************************************
 * Private Types
 ****************************************************************************/

/* This structure describes one DMA channel */

struct dma_channel_s
{
  uint8_t chan;            /* DMA channel number (0-RP2040_DMA_NCHANNELS) */
  bool inuse;              /* TRUE: The DMA channel is in use */
  dma_config_t config;     /* Current configuration */
  dma_callback_t callback; /* Callback invoked when the DMA completes */
  void *arg;               /* Argument passed to callback function */
};

/* This structure describes the state of the DMA controller */

struct dma_controller_s
{
  sem_t exclsem;           /* Protects channel table */
  sem_t chansem;           /* Count of free channels */
};

/****************************************************************************
 * Private Data
 ****************************************************************************/

/* This is the overall state of the DMA controller */

static struct dma_controller_s g_dmac;

/* This is the array of all DMA channels */

static struct dma_channel_s g_dmach[RP2040_DMA_NCHANNELS];

/****************************************************************************
 * Public Data
 ****************************************************************************/

/****************************************************************************
 * Private Functions
 ****************************************************************************/

/****************************************************************************
 * Name: rp2040_dmac_interrupt
 *
 * Description:
 *   DMA interrupt handler.  Services every channel with a pending bit in
 *   INTS0, reporting AHB read/write errors through the completion callback.
 *
 ****************************************************************************/

static int rp2040_dmac_interrupt(int irq, void *context, FAR void *arg)
{
  struct dma_channel_s *dmach;
  int result = OK;
  unsigned int ch;
  uint32_t stat;
  uint32_t ctrl;

  /* Get and clear pending DMA interrupt status */

  stat = getreg32(RP2040_DMA_INTS0) & RP2040_DMA_INTS0_MASK;
  putreg32(stat, RP2040_DMA_INTS0);

  /* Service each pending channel, lowest-numbered first */

  while (stat != 0)
    {
      ch = ffs(stat) - 1;
      stat &= ~(1 << ch);

      /* An AHB error latches in CTRL_TRIG; writing the READ/WRITE error
       * bits clears the sticky error state for the channel.
       *
       * NOTE(review): result is initialized once outside this loop and is
       * never reset to OK per channel, so an error on one channel also
       * reports EIO to every later channel serviced in the same interrupt.
       * Confirm whether that is intended.
       */

      ctrl = getreg32(RP2040_DMA_CTRL_TRIG(ch));
      if (ctrl & RP2040_DMA_CTRL_TRIG_AHB_ERROR)
        {
          setbits_reg32(RP2040_DMA_CTRL_TRIG_READ_ERROR |
                        RP2040_DMA_CTRL_TRIG_WRITE_ERROR,
                        RP2040_DMA_CTRL_TRIG(ch));
          result = EIO;
        }

      dmach = &g_dmach[ch];

      /* Call the DMA completion callback */

      if (dmach->callback)
        {
          dmach->callback((DMA_HANDLE)dmach, result, dmach->arg);
          dmach->callback = NULL;
        }

      dmach->arg = NULL;
    }

  return OK;
}

/****************************************************************************
 * Public Functions
 ****************************************************************************/

/****************************************************************************
 * Name: arm_dma_initialize
 *
 * Description:
 *   Initialize the DMA subsystem
 *
 * Returned Value:
 *   None
 *
 ****************************************************************************/

void weak_function arm_dma_initialize(void)
{
  int i;

  dmainfo("Initialize DMAC\n");

  /* Initialize the channel list.  chansem counts the free channels so
   * rp2040_dmachannel() can block until one is available.
   */

  nxsem_init(&g_dmac.exclsem, 0, 1);
  nxsem_init(&g_dmac.chansem, 0, RP2040_DMA_NCHANNELS);

  for (i = 0; i < RP2040_DMA_NCHANNELS; i++)
    {
      g_dmach[i].chan = i;
      putreg32(0, RP2040_DMA_CTRL_TRIG(i));
    }

  /* Mask all channel interrupts and clear any stale pending status */

  putreg32(0, RP2040_DMA_INTE0);
  putreg32(RP2040_DMA_INTS0_MASK, RP2040_DMA_INTS0);

  /* Attach DMA completion interrupt handler */

  irq_attach(RP2040_DMA_IRQ_0, rp2040_dmac_interrupt, NULL);
  up_enable_irq(RP2040_DMA_IRQ_0);
}

/****************************************************************************
 * Name: rp2040_dmachannel
 *
 * Description:
 *   Allocate a DMA channel.  This function gives the caller mutually
 *   exclusive access to a DMA channel.
 *
 *   If no DMA channel is available, then rp2040_dmachannel() will wait
 *   until the holder of a channel relinquishes the channel by calling
 *   rp2040_dmafree().
 *
 * Input parameters:
 *   None
 *
 * Returned Value:
 *   This function ALWAYS returns a non-NULL, void* DMA channel handle.
 *   (NULL is returned only if the semaphore wait is aborted.)
 *
 * Assumptions:
 *   - The caller can wait for a DMA channel to be freed if it is not
 *     available.
 *
 ****************************************************************************/

DMA_HANDLE rp2040_dmachannel(void)
{
  struct dma_channel_s *dmach;
  unsigned int ch;
  uint32_t bit = 0;
  int ret;

  /* Take a count from the channel counting semaphore.  We may block
   * if there are no free channels.  When we get the count, then we can
   * be assured that a channel is available in the channel list and is
   * reserved for us.
   */

  ret = nxsem_wait_uninterruptible(&g_dmac.chansem);
  if (ret < 0)
    {
      return NULL;
    }

  /* Get exclusive access to the DMA channel list */

  ret = nxsem_wait_uninterruptible(&g_dmac.exclsem);
  if (ret < 0)
    {
      /* Give the reserved count back before bailing out */

      nxsem_post(&g_dmac.chansem);
      return NULL;
    }

  /* Search for an available DMA channel */

  for (ch = 0, dmach = NULL; ch < RP2040_DMA_NCHANNELS; ch++)
    {
      struct dma_channel_s *candidate = &g_dmach[ch];
      if (!candidate->inuse)
        {
          dmach = candidate;
          dmach->inuse = true;
          bit = 1 << ch;
          break;
        }
    }

  nxsem_post(&g_dmac.exclsem);

  /* Clear any stale pending status, then unmask this channel's interrupt */

  setbits_reg32(bit, RP2040_DMA_INTS0);
  setbits_reg32(bit, RP2040_DMA_INTE0);

  /* Since we have reserved a DMA descriptor by taking a count from chansem,
   * it would be a serious logic failure if we could not find a free channel
   * for our use.
   */

  DEBUGASSERT(dmach);
  return (DMA_HANDLE)dmach;
}

/****************************************************************************
 * Name: rp2040_dmafree
 *
 * Description:
 *   Release a DMA channel.  If another thread is waiting for this DMA
 *   channel in a call to rp2040_dmachannel, then this function will
 *   re-assign the DMA channel to that thread and wake it up.  NOTE:  The
 *   'handle' used in this argument must NEVER be used again until
 *   rp2040_dmachannel() is called again to re-gain access to the channel.
 *
 * Returned Value:
 *   None
 *
 * Assumptions:
 *   - The caller holds the DMA channel.
 *   - There is no DMA in progress
 *
 ****************************************************************************/

void rp2040_dmafree(DMA_HANDLE handle)
{
  struct dma_channel_s *dmach = (struct dma_channel_s *)handle;
  unsigned int ch;

  DEBUGASSERT(dmach != NULL && dmach->inuse);
  ch = dmach->chan;
  dmainfo("DMA channel %d\n", ch);

  /* Disable the channel: abort any in-flight transfer, clear its control
   * register, then mask and clear its interrupt.
   */

  setbits_reg32(1 << dmach->chan, RP2040_DMA_CHAN_ABORT);
  putreg32(0, RP2040_DMA_CTRL_TRIG(ch));
  clrbits_reg32(1 << dmach->chan, RP2040_DMA_INTE0);
  clrbits_reg32(1 << dmach->chan, RP2040_DMA_INTS0);

  /* Mark the channel no longer in use.  Clearing the in-use flag is an
   * atomic operation and so should be safe.
   */

  dmach->inuse = false;

  /* And increment the count of free channels...  possibly waking up a
   * thread that may be waiting for a channel.
   */

  nxsem_post(&g_dmac.chansem);
}

/****************************************************************************
 * Name: rp2040_rxdmasetup
 *
 * Description:
 *   Configure an RX (peripheral-to-memory) DMA before starting the transfer.
 *
 * Input Parameters:
 *   paddr  - Peripheral address (source)
 *   maddr  - Memory address (destination)
 *   nbytes - Number of bytes to transfer.  Must be an even multiple of the
 *            configured transfer size.
 *   config - Channel configuration selections
 *
 ****************************************************************************/

void rp2040_rxdmasetup(DMA_HANDLE handle, uintptr_t paddr, uintptr_t maddr,
                       size_t nbytes, dma_config_t config)
{
  struct dma_channel_s *dmach = (struct dma_channel_s *)handle;
  unsigned int ch;
  uint32_t count;
  uint32_t mask;
  uint32_t ctrl;

  DEBUGASSERT(dmach != NULL && dmach->inuse);
  ch = dmach->chan;

  /* Save the configuration (for rp2040_dmastart()). */

  dmach->config = config;

  DEBUGASSERT(config.size >= RP2040_DMA_SIZE_BYTE &&
              config.size <= RP2040_DMA_SIZE_WORD);

  /* mask aligns the addresses down to the transfer size; count is the
   * number of transfer-size units (nbytes / 2^size).
   */

  mask = (1 << config.size) - 1;
  count = nbytes >> config.size;

  DEBUGASSERT(count > 0);

  /* Set DMA registers */

  putreg32(paddr & ~mask, RP2040_DMA_READ_ADDR(ch));
  putreg32(maddr & ~mask, RP2040_DMA_WRITE_ADDR(ch));
  putreg32(count, RP2040_DMA_TRANS_COUNT(ch));

  /* Chaining to the channel's own number disables chaining.  The read
   * (peripheral) address stays fixed; the write (memory) side increments
   * unless config.noincr is set.
   */

  ctrl = RP2040_DMA_CTRL_TRIG_READ_ERROR |
         RP2040_DMA_CTRL_TRIG_WRITE_ERROR |
         ((config.dreq << RP2040_DMA_CTRL_TRIG_TREQ_SEL_SHIFT) &
          RP2040_DMA_CTRL_TRIG_TREQ_SEL_MASK) |
         ((ch << RP2040_DMA_CTRL_TRIG_CHAIN_TO_SHIFT) &
          RP2040_DMA_CTRL_TRIG_CHAIN_TO_MASK) |
         (config.size << RP2040_DMA_CTRL_TRIG_DATA_SIZE_SHIFT);

  if (!config.noincr)
    {
      ctrl |= RP2040_DMA_CTRL_TRIG_INCR_WRITE;
    }

  putreg32(ctrl, RP2040_DMA_CTRL_TRIG(ch));
}

/****************************************************************************
 * Name: rp2040_txdmasetup
 *
 * Description:
 *   Configure an TX (memory-to-memory) DMA before starting the transfer.
 *
 * Input Parameters:
 *   paddr  - Peripheral address (destination)
 *   maddr  - Memory address (source)
 *   nbytes - Number of bytes to transfer.  Must be an even multiple of the
 *            configured transfer size.
 *   config - Channel configuration selections
 *
 ****************************************************************************/

void rp2040_txdmasetup(DMA_HANDLE handle, uintptr_t paddr, uintptr_t maddr,
                       size_t nbytes, dma_config_t config)
{
  struct dma_channel_s *dmach = (struct dma_channel_s *)handle;
  unsigned int ch;
  uint32_t count;
  uint32_t mask;
  uint32_t ctrl;

  DEBUGASSERT(dmach != NULL && dmach->inuse);
  ch = dmach->chan;

  /* Save the configuration (for rp2040_dmastart()). */

  dmach->config = config;

  DEBUGASSERT(config.size >= RP2040_DMA_SIZE_BYTE &&
              config.size <= RP2040_DMA_SIZE_WORD);

  mask = (1 << config.size) - 1;
  count = nbytes >> config.size;

  DEBUGASSERT(count > 0);

  /* Set DMA registers -- mirror of rp2040_rxdmasetup() with the memory
   * side as the read (source) address.
   */

  putreg32(maddr & ~mask, RP2040_DMA_READ_ADDR(ch));
  putreg32(paddr & ~mask, RP2040_DMA_WRITE_ADDR(ch));
  putreg32(count, RP2040_DMA_TRANS_COUNT(ch));

  ctrl = RP2040_DMA_CTRL_TRIG_READ_ERROR |
         RP2040_DMA_CTRL_TRIG_WRITE_ERROR |
         ((config.dreq << RP2040_DMA_CTRL_TRIG_TREQ_SEL_SHIFT) &
          RP2040_DMA_CTRL_TRIG_TREQ_SEL_MASK) |
         ((ch << RP2040_DMA_CTRL_TRIG_CHAIN_TO_SHIFT) &
          RP2040_DMA_CTRL_TRIG_CHAIN_TO_MASK) |
         (config.size << RP2040_DMA_CTRL_TRIG_DATA_SIZE_SHIFT);

  if (!config.noincr)
    {
      ctrl |= RP2040_DMA_CTRL_TRIG_INCR_READ;
    }

  putreg32(ctrl, RP2040_DMA_CTRL_TRIG(ch));
}

/****************************************************************************
 * Name: rp2040_dmastart
 *
 * Description:
 *   Start the DMA transfer
 *
 * Assumptions:
 *   - DMA handle allocated by rp2040_dmachannel()
 *   - No DMA in progress
 *
 ****************************************************************************/

void rp2040_dmastart(DMA_HANDLE handle, dma_callback_t callback, void *arg)
{
  struct dma_channel_s *dmach = (struct dma_channel_s *)handle;
  uint32_t ch;

  DEBUGASSERT(dmach && dmach->inuse);
  ch = dmach->chan;

  /* Save the DMA complete callback info */

  dmach->callback = callback;
  dmach->arg = arg;

  /* Enable the channel */

  setbits_reg32(RP2040_DMA_CTRL_TRIG_EN, RP2040_DMA_CTRL_TRIG(ch));
}

/****************************************************************************
 * Name: rp2040_dmastop
 *
 * Description:
 *   Cancel the DMA.  After rp2040_dmastop() is called, the DMA channel is
 *   reset and rp2040_dmasetup() must be called before rp2040_dmastart()
 *   can be called again
 *
 * Assumptions:
 *   - DMA handle allocated by rp2040_dmachannel()
 *
 ****************************************************************************/

void rp2040_dmastop(DMA_HANDLE handle)
{
  struct dma_channel_s *dmach = (struct dma_channel_s *)handle;
  uint32_t bit;
  uint32_t stat;

  DEBUGASSERT(dmach);
  bit = 1 << dmach->chan;

  /* Disable the channel: request an abort, then busy-wait until the
   * hardware clears the abort bit for this channel.
   */

  setbits_reg32(bit, RP2040_DMA_CHAN_ABORT);

  do
    {
      stat = getreg32(RP2040_DMA_CHAN_ABORT);
    }
  while (stat & bit);
}
5,088
2,372
<filename>physx/source/pvd/include/PxPvdDataStream.h // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of NVIDIA CORPORATION nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ''AS IS'' AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY // OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Copyright (c) 2008-2021 NVIDIA Corporation. All rights reserved. 
#ifndef PXPVDSDK_PXPVDDATASTREAM_H #define PXPVDSDK_PXPVDDATASTREAM_H /** \addtogroup pvd @{ */ #include "pvd/PxPvd.h" #include "PxPvdErrorCodes.h" #include "PxPvdObjectModelBaseTypes.h" #if !PX_DOXYGEN namespace physx { namespace pvdsdk { #endif class PvdPropertyDefinitionHelper; class PvdMetaDataStream { protected: virtual ~PvdMetaDataStream() { } public: virtual PvdError createClass(const NamespacedName& nm) = 0; template <typename TDataType> PvdError createClass() { return createClass(getPvdNamespacedNameForType<TDataType>()); } virtual PvdError deriveClass(const NamespacedName& parent, const NamespacedName& child) = 0; template <typename TParentType, typename TChildType> PvdError deriveClass() { return deriveClass(getPvdNamespacedNameForType<TParentType>(), getPvdNamespacedNameForType<TChildType>()); } virtual bool isClassExist(const NamespacedName& nm) = 0; template <typename TDataType> bool isClassExist() { return isClassExist(getPvdNamespacedNameForType<TDataType>()); } virtual PvdError createProperty(const NamespacedName& clsName, const char* name, const char* semantic, const NamespacedName& dtypeName, PropertyType::Enum propertyType, DataRef<NamedValue> values = DataRef<NamedValue>()) = 0; template <typename TClsType, typename TDataType> PvdError createProperty(String name, String semantic = "", PropertyType::Enum propertyType = PropertyType::Scalar, DataRef<NamedValue> values = DataRef<NamedValue>()) { return createProperty(getPvdNamespacedNameForType<TClsType>(), name, semantic, getPvdNamespacedNameForType<TDataType>(), propertyType, values); } virtual PvdError createPropertyMessage(const NamespacedName& cls, const NamespacedName& msgName, DataRef<PropertyMessageArg> entries, uint32_t messageSizeInBytes) = 0; template <typename TClsType, typename TMsgType> PvdError createPropertyMessage(DataRef<PropertyMessageArg> entries) { return createPropertyMessage(getPvdNamespacedNameForType<TClsType>(), getPvdNamespacedNameForType<TMsgType>(), entries, 
sizeof(TMsgType)); } }; class PvdInstanceDataStream { protected: virtual ~PvdInstanceDataStream() { } public: virtual PvdError createInstance(const NamespacedName& cls, const void* instance) = 0; template <typename TDataType> PvdError createInstance(const TDataType* inst) { return createInstance(getPvdNamespacedNameForType<TDataType>(), inst); } virtual bool isInstanceValid(const void* instance) = 0; // If the property will fit or is already completely in memory virtual PvdError setPropertyValue(const void* instance, String name, DataRef<const uint8_t> data, const NamespacedName& incomingTypeName) = 0; template <typename TDataType> PvdError setPropertyValue(const void* instance, String name, const TDataType& value) { const uint8_t* dataStart = reinterpret_cast<const uint8_t*>(&value); return setPropertyValue(instance, name, DataRef<const uint8_t>(dataStart, dataStart + sizeof(TDataType)), getPvdNamespacedNameForType<TDataType>()); } template <typename TDataType> PvdError setPropertyValue(const void* instance, String name, const TDataType* value, uint32_t numItems) { const uint8_t* dataStart = reinterpret_cast<const uint8_t*>(value); return setPropertyValue(instance, name, DataRef<const uint8_t>(dataStart, dataStart + sizeof(TDataType) * numItems), getPvdNamespacedNameForType<TDataType>()); } // Else if the property is very large (contact reports) you can send it in chunks. 
virtual PvdError beginSetPropertyValue(const void* instance, String name, const NamespacedName& incomingTypeName) = 0; template <typename TDataType> PvdError beginSetPropertyValue(const void* instance, String name) { return beginSetPropertyValue(instance, name, getPvdNamespacedNameForType<TDataType>()); } virtual PvdError appendPropertyValueData(DataRef<const uint8_t> data) = 0; template <typename TDataType> PvdError appendPropertyValueData(const TDataType* value, uint32_t numItems) { const uint8_t* dataStart = reinterpret_cast<const uint8_t*>(value); return appendPropertyValueData(DataRef<const uint8_t>(dataStart, dataStart + numItems * sizeof(TDataType))); } virtual PvdError endSetPropertyValue() = 0; // Set a set of properties to various values on an object. virtual PvdError setPropertyMessage(const void* instance, const NamespacedName& msgName, DataRef<const uint8_t> data) = 0; template <typename TDataType> PvdError setPropertyMessage(const void* instance, const TDataType& value) { const uint8_t* dataStart = reinterpret_cast<const uint8_t*>(&value); return setPropertyMessage(instance, getPvdNamespacedNameForType<TDataType>(), DataRef<const uint8_t>(dataStart, sizeof(TDataType))); } // If you need to send of lot of identical messages, this avoids a hashtable lookup per message. 
virtual PvdError beginPropertyMessageGroup(const NamespacedName& msgName) = 0; template <typename TDataType> PvdError beginPropertyMessageGroup() { return beginPropertyMessageGroup(getPvdNamespacedNameForType<TDataType>()); } virtual PvdError sendPropertyMessageFromGroup(const void* instance, DataRef<const uint8_t> data) = 0; template <typename TDataType> PvdError sendPropertyMessageFromGroup(const void* instance, const TDataType& value) { const uint8_t* dataStart = reinterpret_cast<const uint8_t*>(&value); return sendPropertyMessageFromGroup(instance, DataRef<const uint8_t>(dataStart, sizeof(TDataType))); } virtual PvdError endPropertyMessageGroup() = 0; // These functions ensure the target array doesn't contain duplicates virtual PvdError pushBackObjectRef(const void* instId, String propName, const void* objRef) = 0; virtual PvdError removeObjectRef(const void* instId, String propName, const void* objRef) = 0; // Instance elimination. virtual PvdError destroyInstance(const void* key) = 0; // Profiling hooks virtual PvdError beginSection(const void* instance, String name) = 0; virtual PvdError endSection(const void* instance, String name) = 0; // Origin Shift virtual PvdError originShift(const void* scene, PxVec3 shift) = 0; public: /*For some cases, pvd command cannot be run immediately. For example, when create joints, while the actors may still *pending for insert, the joints update commands can be run deffered. 
*/ class PvdCommand { public: // Assigned is needed for copying PvdCommand(const PvdCommand&) { } PvdCommand& operator=(const PvdCommand&) { return *this; } public: PvdCommand() { } virtual ~PvdCommand() { } // Not pure virtual so can have default PvdCommand obj virtual bool canRun(PvdInstanceDataStream&) { return false; } virtual void run(PvdInstanceDataStream&) { } }; // PVD SDK provide this helper function to allocate cmd's memory and release them at after flush the command queue virtual void* allocateMemForCmd(uint32_t length) = 0; // PVD will call the destructor of PvdCommand object at the end fo flushPvdCommand virtual void pushPvdCommand(PvdCommand& cmd) = 0; virtual void flushPvdCommand() = 0; }; class PvdDataStream : public PvdInstanceDataStream, public PvdMetaDataStream { protected: virtual ~PvdDataStream() { } public: virtual void release() = 0; virtual bool isConnected() = 0; virtual void addProfileZone(void* zone, const char* name) = 0; virtual void addProfileZoneEvent(void* zone, const char* name, uint16_t eventId, bool compileTimeEnabled) = 0; virtual PvdPropertyDefinitionHelper& getPropertyDefinitionHelper() = 0; virtual void setIsTopLevelUIElement(const void* instance, bool topLevel) = 0; virtual void sendErrorMessage(uint32_t code, const char* message, const char* file, uint32_t line) = 0; virtual void updateCamera(const char* name, const PxVec3& origin, const PxVec3& up, const PxVec3& target) = 0; /** \brief Create a new PvdDataStream. \param pvd A pointer to a valid PxPvd instance. This must be non-null. */ static PvdDataStream* create(PxPvd* pvd); }; #if !PX_DOXYGEN } // pvdsdk } // physx #endif /** @} */ #endif // PXPVDSDK_PXPVDDATASTREAM_H
3,458
8,570
{ "author": "Microsoft", "name": "Blazor WebAssembly App Empty", "description": "An empty project template for creating a Blazor app that runs on WebAssembly and is optionally hosted by an ASP.NET Core app. This template does not have any content in it.", "symbols/Framework/description": "The target framework for the project.", "symbols/Framework/choices/net7.0/description": "Target net7.0", "symbols/skipRestore/description": "If specified, skips the automatic restore of the project on create.", "symbols/Hosted/displayName": "ASP.NET Core _Hosted", "symbols/Hosted/description": "If specified, includes an ASP.NET Core host for the Blazor WebAssembly app.", "symbols/ExcludeLaunchSettings/description": "Whether to exclude launchSettings.json from the generated template.", "symbols/kestrelHttpPort/description": "Port number to use for the HTTP endpoint in launchSettings.json.", "symbols/kestrelHttpsPort/description": "Port number to use for the HTTPS endpoint in launchSettings.json. This option is only applicable when the parameter no-https is not used.", "symbols/iisHttpPort/description": "Port number to use for the IIS Express HTTP endpoint in launchSettings.json.", "symbols/iisHttpsPort/description": "Port number to use for the IIS Express HTTPS endpoint in launchSettings.json. 
This option is only applicable when the parameter no-https is not used.", "symbols/PWA/displayName": "_Progressive Web Application", "symbols/PWA/description": "If specified, produces a Progressive Web Application (PWA) supporting installation and offline use.", "symbols/HasHttpProfile/description": "Always have HTTP profile.", "symbols/NoHttps/description": "Whether to turn off HTTPS.", "postActions/restore/description": "Restore NuGet packages required by this project.", "postActions/restore/manualInstructions/default/text": "Run 'dotnet restore'", "postActions/restoreClient/description": "Restore NuGet packages required by this project.", "postActions/restoreClient/manualInstructions/default/text": "Run 'dotnet restore'" }
572
769
<gh_stars>100-1000 /* * Copyright (c) Baidu Inc. All rights reserved. * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.baidu.bjf.remoting.protobuf.code; import java.io.File; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.baidu.bjf.remoting.protobuf.FieldType; import com.baidu.bjf.remoting.protobuf.utils.ClassHelper; import com.baidu.bjf.remoting.protobuf.utils.FieldInfo; import com.baidu.bjf.remoting.protobuf.utils.FieldUtils; import com.baidu.bjf.remoting.protobuf.utils.ProtobufProxyUtils; import com.baidu.bjf.remoting.protobuf.utils.StringUtils; /** * Abstract {@link ICodeGenerator} implements. * * @author xiemalin * @since 1.10.7 */ public abstract class AbstractCodeGenerator implements ICodeGenerator { /** Logger for this class. */ private static final Logger LOGGER = LoggerFactory.getLogger(AbstractCodeGenerator.class.getCanonicalName()); /** The debug. */ protected boolean debug = false; /** The output path. */ protected File outputPath; /** The cls. */ protected Class<?> cls; /** The target proxy classname. 
*/ private String targetProxyClassname; /** The fields. */ protected List<FieldInfo> fields; /** * Instantiates a new abstract code generator. * * @param cls the cls */ public AbstractCodeGenerator(Class<?> cls) { this.cls = cls; targetProxyClassname = ClassHelper.getInternalName(cls.getCanonicalName()); fields = ProtobufProxyUtils.fetchFieldInfos(cls, true); } /** * Gets the all dependencies classes. * * @param list the list * @return the all dependencies classes */ public void getAllDependenciesClasses(Set<Class> list) { if (list == null) { throw new RuntimeException("param 'list' is null."); } getAllDependenciesClasses(cls, list); } /** * Gets the all dependencies classes. * * @param cls the cls * @param list the list * @return the all dependencies classes */ private void getAllDependenciesClasses(Class cls, Set<Class> list) { if (list == null) { throw new RuntimeException("param 'list' is null."); } Set<Class> dependenciesClasses = getDependenciesClasses(cls); if (dependenciesClasses.isEmpty()) { return; } for (Class dependencyClass : dependenciesClasses) { if (list.contains(dependencyClass)) { continue; } list.add(dependencyClass); Set<Class> subDependenciesClasses = getDependenciesClasses(dependencyClass); if (subDependenciesClasses.isEmpty()) { continue; } for (Class subClass : subDependenciesClasses) { if (list.contains(subClass)) { continue; } list.add(subClass); getAllDependenciesClasses(subClass, list); } } } /** * Gets the dependencies classes. * * @param cls the cls * @return the dependencies classes */ public Set<Class> getDependenciesClasses(Class cls) { List<FieldInfo> fields = null; try { fields = ProtobufProxyUtils.fetchFieldInfos(cls, true); } catch (Exception e) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(e.getMessage(), e); } } if (fields == null) { return Collections.emptySet(); } Set<Class> dependenciesClasses = getDependenciesClasses(fields); return dependenciesClasses; } /** * Gets the dependencies classes. 
* * @return the dependencies classes */ public Set<Class> getDependenciesClasses() { return getDependenciesClasses(fields); } /** * Gets the dependencies classes. * * @param fields the fields * @return the dependencies classes */ public Set<Class> getDependenciesClasses(List<FieldInfo> fields) { if (fields == null) { return Collections.emptySet(); } Set<Class> ret = new HashSet<Class>(); for (FieldInfo fieldInfo : fields) { if (fieldInfo.isObjectType()) { if (fieldInfo.isList()) { Class<?> genericKeyType = fieldInfo.getGenericKeyType(); if (!FieldInfo.isPrimitiveType(genericKeyType)) { ret.add(genericKeyType); } } else { ret.add(fieldInfo.getField().getType()); } } else if (fieldInfo.isMap()) { Class<?> genericKeyType = fieldInfo.getGenericKeyType(); if (!FieldInfo.isPrimitiveType(genericKeyType)) { ret.add(genericKeyType); } Class<?> genericeValueType = fieldInfo.getGenericeValueType(); if (!FieldInfo.isPrimitiveType(genericeValueType)) { ret.add(genericeValueType); } } else if (fieldInfo.isList()) { Class<?> genericKeyType = fieldInfo.getGenericKeyType(); if (!FieldInfo.isPrimitiveType(genericKeyType)) { ret.add(genericKeyType); } } else if (fieldInfo.getFieldType().isEnum()) { ret.add(fieldInfo.getField().getType()); } } return ret; } /** * Gets the target proxy classname. * * @return the target proxy classname */ protected String getTargetProxyClassname() { return targetProxyClassname; } /** * Sets the output path. * * @param outputPath the new output path */ /* (non-Javadoc) * @see com.baidu.bjf.remoting.protobuf.code.ICodeGenerator#setOutputPath(java.io.File) */ @Override public void setOutputPath(File outputPath) { this.outputPath = outputPath; } /** * Gets the output path. * * @return the output path */ protected File getOutputPath() { return outputPath; } /** * Checks if is debug. 
* * @return true, if is debug */ /* (non-Javadoc) * @see com.baidu.bjf.remoting.protobuf.code.ICodeGenerator#isDebug() */ @Override public boolean isDebug() { return debug; } /** * Sets the debug. * * @param debug the new debug */ /* (non-Javadoc) * @see com.baidu.bjf.remoting.protobuf.code.ICodeGenerator#setDebug(boolean) */ @Override public void setDebug(boolean debug) { this.debug = debug; } /** * Gets the class name. * * @return the class name */ /* (non-Javadoc) * @see com.baidu.bjf.remoting.protobuf.code.ICodeGenerator#getClassName() */ @Override public String getClassName() { return ClassHelper.getClassName(cls); } /** * Gets the package. * * @return the package */ /* (non-Javadoc) * @see com.baidu.bjf.remoting.protobuf.code.ICodeGenerator#getPackage() */ @Override public String getPackage() { return ClassHelper.getPackage(cls); } /** * Gets the full class name. * * @return the full class name */ /* (non-Javadoc) * @see com.baidu.bjf.remoting.protobuf.code.ICodeGenerator#getFullClassName() */ @Override public String getFullClassName() { if (StringUtils.isEmpty(getPackage())) { return getClassName(); } return getPackage() + ClassHelper.PACKAGE_SEPARATOR + getClassName(); } /** * Check {@link FieldType} is validate to class type of {@link Field}. * * @param type the type * @param field the field */ protected void checkType(FieldType type, Field field) { Class<?> cls = field.getType(); if (type == FieldType.OBJECT || type == FieldType.ENUM) { return; } String javaType = type.getJavaType(); if (Integer.class.getSimpleName().equals(javaType)) { if (cls.getSimpleName().equals("int") || Integer.class.getSimpleName().equals(cls.getSimpleName())) { return; } throw new IllegalArgumentException(getMismatchTypeErroMessage(type, field)); } if (!javaType.equalsIgnoreCase(cls.getSimpleName()) && !javaType.equalsIgnoreCase(cls.getName())) { throw new IllegalArgumentException(getMismatchTypeErroMessage(type, field)); } } /** * get error message info by type not matched. 
* * @param type the type * @param field the field * @return error message for mismatch type */ private String getMismatchTypeErroMessage(FieldType type, Field field) { return "Type mismatch. @Protobuf required type '" + type.getJavaType() + "' but field type is '" + field.getType().getSimpleName() + "' of field name '" + field.getName() + "' on class " + field.getDeclaringClass().getCanonicalName(); } /** * get field access code. * * @param target target instance name * @param field java field instance * @param cls mapped class * @return full field access java code */ protected String getAccessByField(String target, Field field, Class<?> cls, boolean wildcardType) { if (field.getModifiers() == Modifier.PUBLIC && !wildcardType) { return target + ClassHelper.PACKAGE_SEPARATOR + field.getName(); } // check if has getter method String getter; if ("boolean".equalsIgnoreCase(field.getType().getCanonicalName())) { getter = "is" + CodedConstant.capitalize(field.getName()); } else { getter = "get" + CodedConstant.capitalize(field.getName()); } // check method exist try { cls.getMethod(getter, new Class<?>[0]); return target + ClassHelper.PACKAGE_SEPARATOR + getter + "()"; } catch (Exception e) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(e.getMessage(), e); } } String type = field.getType().getCanonicalName(); if ("[B".equals(type) || "[Ljava.lang.Byte;".equals(type) || "java.lang.Byte[]".equals(type)) { type = "byte[]"; } // use reflection to get value String code = "(" + FieldUtils.toObjectType(type) + ") "; code += "FieldUtils.getField(" + target + ", \"" + field.getName() + "\")"; return code; } }
5,702
354
#ifndef _ESEXTCTESSELLATIONSHADERBARRIER_HPP #define _ESEXTCTESSELLATIONSHADERBARRIER_HPP /*------------------------------------------------------------------------- * OpenGL Conformance Test Suite * ----------------------------- * * Copyright (c) 2014-2016 The Khronos Group Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /*! * \file * \brief */ /*-------------------------------------------------------------------*/ #include "gluShaderUtil.hpp" #include "tcuDefs.hpp" #include "../esextcTestCaseBase.hpp" namespace glcts { /* Groups all barrier tests */ class TessellationShaderBarrierTests : public TestCaseGroupBase { public: /* Public methods */ TessellationShaderBarrierTests(Context& context, const ExtParameters& extParams); virtual void init(void); }; /** Base class for all tests that check the memory barrier functionality. 
**/ class TessellationShaderBarrierTestCase : public TestCaseBase { public: /* Public methods */ TessellationShaderBarrierTestCase(Context& context, const ExtParameters& extParams, const char* name, const char* description); virtual ~TessellationShaderBarrierTestCase(void) { } virtual void deinit(); virtual void initTest(void); virtual IterateResult iterate(void); protected: /* Protected methods */ virtual void getDrawCallArgs(glw::GLenum* out_mode, glw::GLint* out_count, glw::GLenum* out_tf_mode, glw::GLint* out_n_patch_vertices, glw::GLint* out_n_instances) = 0; virtual const char* getTCSCode() = 0; virtual const char* getTESCode() = 0; virtual const char* getVSCode() = 0; virtual int getXFBBufferSize() = 0; virtual void getXFBProperties(int* out_n_names, const char*** out_names) = 0; virtual bool verifyXFBBuffer(const void* data) = 0; /* Protected variables */ glw::GLuint m_bo_id; glw::GLuint m_fs_id; glw::GLuint m_po_id; glw::GLuint m_tcs_id; glw::GLuint m_tes_id; glw::GLuint m_vao_id; glw::GLuint m_vs_id; }; /** Implementation of Test Case 22.1 * * Make sure that a barrier used in a tessellation control shader synchronizes * all instances working on the same patch. 
Tests the following scenario: * * * invocation A can correctly read a per-vertex & per-patch attribute * modified by invocation B after a barrier() call; **/ class TessellationShaderBarrier1 : public TessellationShaderBarrierTestCase { public: /* Public methods */ TessellationShaderBarrier1(Context& context, const ExtParameters& extParams); virtual ~TessellationShaderBarrier1(void) { } protected: /* Protected methods */ void getDrawCallArgs(glw::GLenum* out_mode, glw::GLint* out_count, glw::GLenum* out_tf_mode, glw::GLint* out_n_patch_vertices, glw::GLint* out_n_instances); const char* getTCSCode(); const char* getTESCode(); const char* getVSCode(); int getXFBBufferSize(); void getXFBProperties(int* out_n_names, const char*** out_names); bool verifyXFBBuffer(const void* data); private: /* Private fields */ unsigned int m_n_input_vertices; const unsigned int m_n_result_vertices; }; /** Implementation of Test Case 22.2 * * Make sure that a barrier used in a tessellation control shader synchronizes * all instances working on the same patch. Tests the following scenario: * * * invocation A writes to a per-patch output. A barrier is then issued, * after which invocation B overwrites the same per-patch output. One more * barrier is issued, after which invocation A should be able to read this * output correctly. 
**/ class TessellationShaderBarrier2 : public TessellationShaderBarrierTestCase { public: /* Public methods */ TessellationShaderBarrier2(Context& context, const ExtParameters& extParams); virtual ~TessellationShaderBarrier2(void) { } protected: /* Protected methods */ void getDrawCallArgs(glw::GLenum* out_mode, glw::GLint* out_count, glw::GLenum* out_tf_mode, glw::GLint* out_n_patch_vertices, glw::GLint* out_n_instances); const char* getTCSCode(); const char* getTESCode(); const char* getVSCode(); int getXFBBufferSize(); void getXFBProperties(int* out_n_names, const char*** out_names); bool verifyXFBBuffer(const void* data); private: /* Private fields */ unsigned int m_n_input_vertices; const unsigned int m_n_result_vertices; }; /** Implementation of Test Case 22.3 * * Make sure that a barrier used in a tessellation control shader synchronizes * all instances working on the same patch. Tests the following scenario: * * * even invocations should write their gl_InvocationID value to their * per-vertex output. A barrier is then issued, after which each odd invocation * should read values stored by preceding even invocation, add current * invocation's ID to that value and then write it to its per-vertex output. * One more barrier should be issued. Then, every fourth invocation should * read & sum up per-vertex outputs for four invocations following it * (including the one discussed), and store it in a per-patch variable. (n+1)-th, * (n+2)-th and (n+3)-th invocations should store zero in dedicated per-patch * variables. 16 invocations should be considered, with 10 instances used for the * draw call, each patch should consist of 8 vertices. 
**/ class TessellationShaderBarrier3 : public TessellationShaderBarrierTestCase { public: /* Public methods */ TessellationShaderBarrier3(Context& context, const ExtParameters& extParams); virtual ~TessellationShaderBarrier3(void) { } protected: /* Protected methods */ void getDrawCallArgs(glw::GLenum* out_mode, glw::GLint* out_count, glw::GLenum* out_tf_mode, glw::GLint* out_n_patch_vertices, glw::GLint* out_n_instances); const char* getTCSCode(); const char* getTESCode(); const char* getVSCode(); int getXFBBufferSize(); void getXFBProperties(int* out_n_names, const char*** out_names); bool verifyXFBBuffer(const void* data); private: /* Private fields */ unsigned int m_n_input_vertices; const unsigned int m_n_instances; const unsigned int m_n_invocations; const unsigned int m_n_patch_vertices; const unsigned int m_n_patches_per_invocation; const unsigned int m_n_result_vertices; }; } // namespace glcts #endif // _ESEXTCTESSELLATIONSHADERBARRIER_HPP
2,210
628
# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-07-18 13:51 from __future__ import unicode_literals import logging from django.db import migrations logger = logging.getLogger(__file__) def migrate_existing_registrations_into_osf_registries(state, schema): AbstractProvider = state.get_model('osf', 'abstractprovider') AbstractNode = state.get_model('osf', 'abstractnode') DraftRegistration = state.get_model('osf', 'draftregistration') try: osf_registries = AbstractProvider.objects.get(_id='osf', type='osf.registrationprovider') except AbstractProvider.DoesNotExist: # Allow test / local dev DBs to pass logger.warn('Unable to find OSF Registries provider - assuming test environment.') pass else: draft_registrations = DraftRegistration.objects.all() registrations = AbstractNode.objects.filter(type='osf.registration') updated_drafts = draft_registrations.update(provider_id=osf_registries.id) updated_registrations = registrations.update(provider_id=osf_registries.id) assert (updated_drafts, updated_registrations) == (draft_registrations.count(), registrations.count()) logger.info('Successfully migrated {} draft registrations and {} public registrations into OSFRegistries'.format(updated_drafts, updated_registrations)) def remove_existing_registrations_from_osf_registries(state, schema): AbstractProvider = state.get_model('osf', 'abstractprovider') try: osf_registries = AbstractProvider.objects.get(_id='osf', type='osf.registrationprovider') except AbstractProvider.DoesNotExist: pass else: total_registrations = osf_registries.registrations.count() + osf_registries.draft_registrations.count() osf_registries.draft_registrations.clear() osf_registries.registrations.clear() logger.info('Successfully removed {} public and draft registrations from OSFRegistries'.format(total_registrations)) class Migration(migrations.Migration): dependencies = [ ('osf', '0134_add_provider_reg_fks'), ] operations = [ migrations.RunPython( 
migrate_existing_registrations_into_osf_registries, remove_existing_registrations_from_osf_registries ) ]
834
3,794
<filename>examples/protorpc/handler/handler.h<gh_stars>1000+ #ifndef HV_PROTO_RPC_HANDLER_H_ #define HV_PROTO_RPC_HANDLER_H_ #include "../router.h" void error_response(protorpc::Response* res, int code, const std::string& message) { res->mutable_error()->set_code(code); res->mutable_error()->set_message(message); } void not_found(const protorpc::Request& req, protorpc::Response* res) { error_response(res, 404, "Not Found"); } void bad_request(const protorpc::Request& req, protorpc::Response* res) { error_response(res, 400, "Bad Request"); } #endif // HV_PROTO_RPC_HANDLER_H_
242
335
<reponame>Safal08/Hacktoberfest-1 { "word": "Vex", "definitions": [ "Make (someone) feel annoyed, frustrated, or worried, especially with trivial matters.", "Be annoyed, irritated, or unhappy.", "Cause distress to." ], "parts-of-speech": "Verb" }
116
341
<gh_stars>100-1000 package com.ccnode.codegenerator.service.pojo; /** * What always stop you is what you always believe. * <p> * Created by zhengjun.du on 2016/07/05 16:38 */ public class RegisterRequest extends ServerRequest { String license; public String getLicense() { return license; } public void setLicense(String license) { this.license = license; } }
141
416
package org.simpleflatmapper.datastax.test; import com.datastax.driver.core.*; import com.datastax.driver.core.ResultSet; import org.junit.Test; import org.simpleflatmapper.datastax.DatastaxMapper; import org.simpleflatmapper.datastax.DatastaxMapperFactory; import org.simpleflatmapper.test.beans.DbObject; import org.simpleflatmapper.test.beans.TestAffinityObject; import java.text.SimpleDateFormat; import java.util.Iterator; import static org.junit.Assert.*; public class DatastaxMapperFactoryTest extends AbstractDatastaxTest { @Test public void testDynamicMapper() throws Exception { testInSession(new Callback() { @Override public void call(Session session) throws Exception { final DatastaxMapper<DbObject> mapper = DatastaxMapperFactory.newInstance().mapTo(DbObject.class); ResultSet rs = session.execute("select id, name, email, creation_time, type_ordinal, type_name from dbobjects"); final Iterator<DbObject> iterator = mapper.iterator(rs); DbObject next = iterator.next(); assertEquals(1, next.getId()); assertEquals("<NAME>", next.getName()); assertEquals("<EMAIL>", next.getEmail()); assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2012-10-02 12:10:10"), next.getCreationTime()); assertEquals(DbObject.Type.type2, next.getTypeOrdinal()); assertEquals(DbObject.Type.type3, next.getTypeName()); assertFalse(iterator.hasNext()); rs = session.execute("select id, name from dbobjects"); next = mapper.iterator(rs).next(); assertEquals(1, next.getId()); assertEquals("<NAME>", next.getName()); assertNull(next.getEmail()); assertNull(next.getCreationTime()); assertNull(next.getTypeOrdinal()); assertNull(next.getTypeName()); rs = session.execute("select id, email from dbobjects"); next = mapper.iterator(rs).next(); assertEquals(1, next.getId()); assertNull(next.getName()); assertEquals("<EMAIL>", next.getEmail()); assertNull(next.getCreationTime()); assertNull(next.getTypeOrdinal()); assertNull(next.getTypeName()); rs = session.execute("select id, type_ordinal from 
dbobjects"); next = mapper.iterator(rs).next(); assertEquals(1, next.getId()); assertNull(next.getName()); assertNull(next.getEmail()); assertNull(next.getCreationTime()); assertEquals(DbObject.Type.type2, next.getTypeOrdinal()); assertNull(next.getTypeName()); } }); } @Test public void testAlias() throws Exception { testInSession(new Callback() { @Override public void call(Session session) throws Exception { final DatastaxMapper<DbObject> mapper = DatastaxMapperFactory.newInstance().addAlias("firstname", "name").mapTo(DbObject.class); ResultSet rs = session.execute("select id, email as firstname from dbobjects"); final Iterator<DbObject> iterator = mapper.iterator(rs); DbObject o = iterator.next(); assertEquals(1, o.getId()); assertEquals("<EMAIL>", o.getName()); } }); } @Test public void testTypeAffinity() throws Exception { testInSession(new Callback() { @Override public void call(Session session) throws Exception { final DatastaxMapper<TestAffinityObject> mapper = DatastaxMapperFactory.newInstance().mapTo(TestAffinityObject.class); ResultSet rs = session.execute("select id as fromInt, email as fromString from dbobjects"); final Iterator<TestAffinityObject> iterator = mapper.iterator(rs); TestAffinityObject o = iterator.next(); assertEquals(1, o.fromInt.i); assertNull(o.fromInt.str); assertEquals("<EMAIL>", o.fromString.str); assertEquals(0, o.fromString.i); } }); } }
1,983
306
<gh_stars>100-1000 { "files": { "readmeSize": 803, "testsSize": 783379, "hasNpmIgnore": true, "hasChangelog": true }, "badges": [ { "info": { "service": "circleci", "type": "build" }, "urls": { "content": "https://img.shields.io/circleci/project/github/facebook/jest.json", "original": "https://circleci.com/gh/facebook/jest.svg?style=shield", "service": "https://circleci.com/gh/facebook/jest.svg", "shields": "https://img.shields.io/circleci/project/github/facebook/jest.svg" } }, { "urls": { "original": "https://travis-ci.org/facebook/jest.svg?branch=master", "service": "https://api.travis-ci.org/facebook/jest.svg?branch=master", "shields": "https://img.shields.io/travis/facebook/jest/master.svg", "content": "https://img.shields.io/travis/facebook/jest/master.json" }, "info": { "service": "travis", "type": "build", "modifiers": { "branch": "master" } } }, { "info": { "service": "appveyor", "type": "build" }, "urls": { "content": "https://img.shields.io/appveyor/ci/8n38o44k585hhvhd/branch/master.json", "original": "https://ci.appveyor.com/api/projects/status/8n38o44k585hhvhd/branch/master?svg=true", "service": "https://ci.appveyor.com/api/projects/status/8n38o44k585hhvhd/branch/master", "shields": "https://img.shields.io/appveyor/ci/8n38o44k585hhvhd/branch/master.svg" } } ], "linters": [ "eslint", "prettier" ], "coverage": 0.64 }
836
575
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/code_cache/generated_code_cache_context.h" #include <memory> #include "base/bind.h" #include "base/files/file_path.h" #include "base/task/post_task.h" #include "content/browser/code_cache/generated_code_cache.h" #include "content/public/browser/browser_task_traits.h" #include "content/public/browser/browser_thread.h" #include "third_party/blink/public/common/features.h" namespace content { GeneratedCodeCacheContext::GeneratedCodeCacheContext() { DCHECK_CURRENTLY_ON(BrowserThread::UI); } void GeneratedCodeCacheContext::Initialize(const base::FilePath& path, int max_bytes) { DCHECK_CURRENTLY_ON(BrowserThread::UI); generated_js_code_cache_ = std::make_unique<GeneratedCodeCache>( path.AppendASCII("js"), max_bytes, GeneratedCodeCache::CodeCacheType::kJavaScript); generated_wasm_code_cache_ = std::make_unique<GeneratedCodeCache>( path.AppendASCII("wasm"), max_bytes, GeneratedCodeCache::CodeCacheType::kWebAssembly); } void GeneratedCodeCacheContext::Shutdown() { DCHECK_CURRENTLY_ON(BrowserThread::UI); generated_js_code_cache_.reset(); generated_wasm_code_cache_.reset(); } GeneratedCodeCache* GeneratedCodeCacheContext::generated_js_code_cache() const { DCHECK_CURRENTLY_ON(BrowserThread::UI); return generated_js_code_cache_.get(); } GeneratedCodeCache* GeneratedCodeCacheContext::generated_wasm_code_cache() const { DCHECK_CURRENTLY_ON(BrowserThread::UI); return generated_wasm_code_cache_.get(); } GeneratedCodeCacheContext::~GeneratedCodeCacheContext() = default; } // namespace content
626
678
<filename>iOSOpenDev/frameworks/IMCore.framework/Frameworks/IMDaemonCore.framework/Headers/IMDMessageStore.h /** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/IMCore.framework/Frameworks/IMDaemonCore.framework/IMDaemonCore */ #import <IMDaemonCore/IMDaemonCore-Structs.h> #import <IMDaemonCore/XXUnknownSuperclass.h> @class NSThread, NSRunLoop, NSString, NSNumber; @interface IMDMessageStore : XXUnknownSuperclass { NSThread *_databaseThread; // 4 = 0x4 NSRunLoop *_runLoop; // 8 = 0x8 CFRunLoopSourceRef _runLoopSource; // 12 = 0xc NSString *_modificationStamp; // 16 = 0x10 NSNumber *_cachedUnreadMessageCount; // 20 = 0x14 } @property(retain) NSString *modificationStamp; // G=0x1b075; S=0x1b089; @synthesize=_modificationStamp @property(readonly, retain) NSThread *databaseThread; // G=0x18555; converted property + (void)_updateCacheForMessageGUID:(id)messageGUID; // 0x185a5 + (id)sharedInstance; // 0x18005 // declared property setter: - (void)setModificationStamp:(id)stamp; // 0x1b089 // declared property getter: - (id)modificationStamp; // 0x1b075 - (long long)unreadMessageCountWithRoomName:(id)roomName onService:(id)service; // 0x1b059 - (long long)unreadMessageCountWithHandle:(id)handle onService:(id)service; // 0x1b03d - (long long)unreadMessagesCount; // 0x1afbd - (id)markMessagesAsReadWithRoomname:(id)roomname onService:(id)service upToGUID:(id)guid fromMe:(BOOL)me; // 0x1aed9 - (id)markMessagesAsReadWithHandle:(id)handle onService:(id)service upToGUID:(id)guid fromMe:(BOOL)me; // 0x1adf5 - (id)deleteMessagesWithRoomname:(id)roomname onService:(id)service; // 0x1ab55 - (id)deleteMessagesWithHandles:(id)handles onService:(id)service; // 0x1a83d - (id)deleteMessageGUIDs:(id)guids; // 0x1a5a9 - (id)chatForMessageGUID:(id)messageGUID; // 0x1a3e5 - (id)chatForMessage:(id)message; // 0x1a3b5 - (id)messagesWithRoomName:(id)roomName onService:(id)service backToMessageGUID:(id)messageGUID; // 0x1a26d - 
(id)messagesWithHandles:(id)handles onService:(id)service backToMessageGUID:(id)messageGUID; // 0x1a125 - (id)messagesWithRoomName:(id)roomName onService:(id)service limit:(unsigned)limit; // 0x19fe1 - (id)messagesWithHandles:(id)handles onService:(id)service limit:(unsigned)limit; // 0x19e9d - (id)messageWithGUID:(id)guid; // 0x19c99 - (BOOL)hasStoredMessageWithGUID:(id)guid; // 0x19add - (id)messagesWithGUIDs:(id)guids; // 0x199a1 - (void)registerTransfersWithGUIDs:(id)guids; // 0x1973d - (id)_messagesWithRoomName:(id)roomName onService:(id)service limit:(unsigned)limit; // 0x195f5 - (id)_messagesWithHandles:(id)handles onService:(id)service limit:(unsigned)limit; // 0x19429 - (id)_messagesWithGUIDs:(id)guids; // 0x192ed - (id)updateMessage:(id)message; // 0x19091 - (id)storeMessage:(id)message forceReplace:(BOOL)replace; // 0x188b1 - (id)storeMessage:(id)message; // 0x1889d - (void)_storeAttachmentsForMessage:(id)message; // 0x187a1 - (void)performBlock:(id)block waitUntilDone:(BOOL)done; // 0x18585 - (void)performBlock:(id)block; // 0x18565 // converted property getter: - (id)databaseThread; // 0x18555 - (void)_updateModificationDate; // 0x184f9 - (void)dealloc; // 0x18485 - (void)_performInitialHousekeeping; // 0x181ad - (id)init; // 0x180b9 - (BOOL)retainWeakReference; // 0x180b5 - (BOOL)allowsWeakReference; // 0x180b1 - (void)_threadedMain; // 0x17e2d @end
1,332
5,788
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.sharding.route.engine.validator.ddl.impl; import org.apache.shardingsphere.infra.binder.statement.SQLStatementContext; import org.apache.shardingsphere.infra.exception.ShardingSphereException; import org.apache.shardingsphere.infra.metadata.schema.ShardingSphereSchema; import org.apache.shardingsphere.infra.route.context.RouteContext; import org.apache.shardingsphere.sharding.route.engine.validator.ddl.ShardingDDLStatementValidator; import org.apache.shardingsphere.sharding.rule.ShardingRule; import org.apache.shardingsphere.sql.parser.sql.common.segment.ddl.index.IndexSegment; import org.apache.shardingsphere.sql.parser.sql.common.statement.ddl.AlterIndexStatement; import org.apache.shardingsphere.sql.parser.sql.dialect.handler.ddl.AlterIndexStatementHandler; import java.util.List; import java.util.Optional; /** * Sharding alter index statement validator. 
*/ public final class ShardingAlterIndexStatementValidator extends ShardingDDLStatementValidator<AlterIndexStatement> { @Override public void preValidate(final ShardingRule shardingRule, final SQLStatementContext<AlterIndexStatement> sqlStatementContext, final List<Object> parameters, final ShardingSphereSchema schema) { Optional<IndexSegment> index = sqlStatementContext.getSqlStatement().getIndex(); if (index.isPresent() && !isSchemaContainsIndex(schema, index.get())) { throw new ShardingSphereException("Index '%s' does not exist.", index.get().getIdentifier().getValue()); } Optional<IndexSegment> renameIndex = AlterIndexStatementHandler.getRenameIndexSegment(sqlStatementContext.getSqlStatement()); if (renameIndex.isPresent() && isSchemaContainsIndex(schema, renameIndex.get())) { throw new ShardingSphereException("Index '%s' already exists.", renameIndex.get().getIdentifier().getValue()); } } @Override public void postValidate(final ShardingRule shardingRule, final SQLStatementContext<AlterIndexStatement> sqlStatementContext, final RouteContext routeContext, final ShardingSphereSchema schema) { } }
941
669
<filename>onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.cc // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. #include <iostream> #include <string> #include <vector> #include "helper.h" #include "core/common/safeint.h" #include "core/common/logging/logging.h" #include "core/framework/tensorprotoutils.h" #include "core/graph/graph.h" #include "core/graph/graph_viewer.h" #include "core/providers/common.h" #include "core/providers/shared/node_unit/node_unit.h" #include "core/providers/shared/utils/utils.h" #include "op_support_checker.h" namespace onnxruntime { namespace nnapi { std::string GetErrorCause(int error_code) { switch (error_code) { case ANEURALNETWORKS_NO_ERROR: return "ANEURALNETWORKS_NO_ERROR"; case ANEURALNETWORKS_OUT_OF_MEMORY: return "ANEURALNETWORKS_OUT_OF_MEMORY"; case ANEURALNETWORKS_INCOMPLETE: return "ANEURALNETWORKS_INCOMPLETE"; case ANEURALNETWORKS_UNEXPECTED_NULL: return "ANEURALNETWORKS_UNEXPECTED_NULL"; case ANEURALNETWORKS_BAD_DATA: return "ANEURALNETWORKS_BAD_DATA"; case ANEURALNETWORKS_OP_FAILED: return "ANEURALNETWORKS_OP_FAILED"; case ANEURALNETWORKS_BAD_STATE: return "ANEURALNETWORKS_BAD_STATE"; case ANEURALNETWORKS_UNMAPPABLE: return "ANEURALNETWORKS_UNMAPPABLE"; case ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE: return "ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE"; case ANEURALNETWORKS_UNAVAILABLE_DEVICE: return "ANEURALNETWORKS_UNAVAILABLE_DEVICE"; default: return "Unknown error code: " + std::to_string(error_code); } } QuantizedOpType GetQuantizedOpType(const NodeUnit& node_unit) { const auto& op_type = node_unit.OpType(); if (node_unit.UnitType() == NodeUnit::Type::SingleNode) { if (op_type == "DequantizeLinear") return QuantizedOpType::DequantizeLinear; else if (op_type == "QuantizeLinear") return QuantizedOpType::QuantizeLinear; else if (op_type == "QLinearConv") return QuantizedOpType::QLinearConv; else if (op_type == "QLinearMatMul") return QuantizedOpType::QLinearMatMul; 
else if (op_type == "QLinearAdd") return QuantizedOpType::QLinearAdd; else if (op_type == "QLinearMul") return QuantizedOpType::QLinearMul; else if (op_type == "QLinearSigmoid") return QuantizedOpType::QLinearSigmoid; else if (op_type == "QLinearAveragePool") return QuantizedOpType::QLinearAveragePool; } else if (node_unit.UnitType() == NodeUnit::Type::QDQGroup) { if (op_type == "Conv") return QuantizedOpType::QDQConv; else if (op_type == "Resize") return QuantizedOpType::QDQResize; else if (op_type == "AveragePool") return QuantizedOpType::QDQAveragePool; else if (op_type == "Add") return QuantizedOpType::QDQAdd; else if (op_type == "Mul") return QuantizedOpType::QDQMul; else if (op_type == "Transpose") return QuantizedOpType::QDQTranspose; else if (op_type == "Reshape") return QuantizedOpType::QDQReshape; else if (op_type == "Softmax") return QuantizedOpType::QDQSoftmax; else if (op_type == "Concat") return QuantizedOpType::QDQConcat; else if (op_type == "Gemm") return QuantizedOpType::QDQGemm; else if (op_type == "MatMul") return QuantizedOpType::QDQMatMul; } else { // throw? 
} return QuantizedOpType::Unknown; } ConvType GetConvType(const NodeUnit& node_unit, const InitializedTensorSet& initializers) { NodeAttrHelper helper(node_unit); const auto group = helper.Get("group", 1); const auto& weight = node_unit.Inputs()[1].node_arg.Name(); const auto& weight_tensor = *initializers.at(weight); // For ONNX we only have 1 conv ops // For NNAPI we have 3 // Input is (N, C, H, W) // group == 1, --> regular conv // group != 1 && weight is (M, 1, kH, kW), --> depthwise conv // group != 1 && weight is (M, C/group, kH, kW), --> grouped conv if (group == 1) return ConvType::Regular; else if ((weight_tensor.dims()[1] == 1)) return ConvType::Depthwise; else return ConvType::Grouped; } bool IsQuantizedConv(QuantizedOpType quant_op_type) { return (quant_op_type == QuantizedOpType::QLinearConv) || (quant_op_type == QuantizedOpType::QDQConv); } bool IsQuantizedPool(QuantizedOpType quant_op_type) { return (quant_op_type == QuantizedOpType::QLinearAveragePool) || (quant_op_type == QuantizedOpType::QDQAveragePool); } bool IsQuantizedGemm(QuantizedOpType quant_op_type) { return (quant_op_type == QuantizedOpType::QLinearMatMul) || (quant_op_type == QuantizedOpType::QDQGemm) || (quant_op_type == QuantizedOpType::QDQMatMul); } bool IsQuantizedBinaryOp(QuantizedOpType quant_op_type) { return quant_op_type == QuantizedOpType::QLinearMatMul || quant_op_type == QuantizedOpType::QLinearAdd || quant_op_type == QuantizedOpType::QLinearMul || quant_op_type == QuantizedOpType::QDQAdd || quant_op_type == QuantizedOpType::QDQMul || quant_op_type == QuantizedOpType::QDQGemm || quant_op_type == QuantizedOpType::QDQMatMul || IsQuantizedConv(quant_op_type); } bool HasValidBinaryOpQuantizedInputTypes(const NodeUnit& node_unit) { auto quant_op_type = GetQuantizedOpType(node_unit); int32_t a_input_type, b_input_type; if (!IsQuantizedBinaryOp(quant_op_type)) { LOGS_DEFAULT(VERBOSE) << "[" << node_unit.OpType() << "] is not a binary qlinear op"; return false; } const auto& inputs = 
node_unit.Inputs(); if (!GetType(inputs[0].node_arg, a_input_type)) return false; if (!GetType(inputs[1].node_arg, b_input_type)) return false; // QlinearConv/MatMul/QDQGemm/QDQMatMul supports u8u8 or u8s8 // QLinearAdd/QLinearMul only support u8u8 bool is_quant_conv_or_gemm = IsQuantizedConv(quant_op_type) || IsQuantizedGemm(quant_op_type); bool has_valid_qlinear_conv_weight = (b_input_type == ONNX_NAMESPACE::TensorProto_DataType_UINT8 || b_input_type == ONNX_NAMESPACE::TensorProto_DataType_INT8); if (a_input_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8 || (!is_quant_conv_or_gemm && a_input_type != b_input_type) || (is_quant_conv_or_gemm && !has_valid_qlinear_conv_weight)) { LOGS_DEFAULT(VERBOSE) << "[" << node_unit.OpType() << "] A Input type: [" << a_input_type << "] B Input type: [" << b_input_type << "] is not supported for now"; return false; } return true; } common::Status GetQuantizationScaleAndZeroPoint( const InitializedTensorSet& initializers, const NodeUnitIODef& io_def, const Path& model_path, float& scale, int32_t& zero_point) { scale = 0.0f; zero_point = 0; if (!io_def.quant_param) { // Not a quantized IO return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT, "NodeArg: ", io_def.node_arg.Name(), " is not quantized"); } const auto unpack_tensor = [&model_path](const InitializedTensorSet& initializers, const std::string& name, std::vector<uint8_t>& unpacked_tensor) { const auto& tensor = *initializers.at(name); ORT_RETURN_IF_ERROR( onnxruntime::utils::UnpackInitializerData(tensor, model_path, unpacked_tensor)); return Status::OK(); }; const auto& quant_param = *io_def.quant_param; { // get the scale std::vector<uint8_t> unpacked_tensor; const auto& name = quant_param.scale.Name(); ORT_RETURN_IF_ERROR(unpack_tensor(initializers, name, unpacked_tensor)); // The scale should be one or more floats ORT_RETURN_IF(unpacked_tensor.size() < 4, "The initializer [", name, "] should have one or more floats ", "with size no less than 4, actual size: ", 
unpacked_tensor.size()); scale = reinterpret_cast<const float*>(unpacked_tensor.data())[0]; } if (quant_param.zero_point) { // get the zero point if it's there std::vector<uint8_t> unpacked_tensor; const auto& name = quant_param.zero_point->Name(); ORT_RETURN_IF_ERROR(unpack_tensor(initializers, name, unpacked_tensor)); ORT_RETURN_IF(unpacked_tensor.empty(), "The initializer [", name, "] is empty"); // Onnx quantization uses uint8 [int8 not yet supported], need to cast to int32_t used by NNAPI zero_point = static_cast<int32_t>(unpacked_tensor[0]); } return Status::OK(); } common::Status GetQuantizationScaleAndZeroPoint( const InitializedTensorSet& initializers, const NodeUnit& node_unit, const std::string& name, float& scale, int32_t& zero_point, ArgType arg_type) { const auto& io_defs = arg_type == ArgType::kInput ? node_unit.Inputs() : node_unit.Outputs(); for (const auto& io_def : io_defs) { if (io_def.node_arg.Name() == name) return GetQuantizationScaleAndZeroPoint(initializers, io_def, node_unit.ModelPath(), scale, zero_point); } return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT, "Unknown input: ", name, ", for NodeUnit with node index: ", node_unit.Index()); } bool GetShape(const NodeArg& node_arg, Shape& shape) { shape.clear(); const auto* shape_proto = node_arg.Shape(); if (!shape_proto) { LOGS_DEFAULT(WARNING) << "NodeArg [" << node_arg.Name() << "] has no shape info"; return false; } // NNAPI uses 0 for dynamic dimension, which is the default value for dim.dim_value() for (const auto& dim : shape_proto->dim()) shape.push_back(SafeInt<uint32_t>(dim.dim_value())); return true; } bool GetType(const NodeArg& node_arg, int32_t& type) { type = ONNX_NAMESPACE::TensorProto_DataType_UNDEFINED; const auto* type_proto = node_arg.TypeAsProto(); if (!type_proto || !type_proto->has_tensor_type() || !type_proto->tensor_type().has_elem_type()) { LOGS_DEFAULT(WARNING) << "NodeArg [" << node_arg.Name() << "] has no input type"; return false; } type = 
type_proto->tensor_type().elem_type(); return true; } void GetFlattenOutputShape(const NodeUnit& node_unit, const Shape& input_shape, int32_t& dim_1, int32_t& dim_2) { int32_t rank = static_cast<int>(input_shape.size()); NodeAttrHelper helper(node_unit); int32_t axis = helper.Get("axis", 1); // axis == rank is a valid input, but invalid for HandleNegativeAxis // Skip non-negative axis here if (axis < 0) axis = static_cast<int32_t>(HandleNegativeAxis(axis, rank)); dim_1 = std::accumulate(input_shape.cbegin(), input_shape.cbegin() + axis, 1, std::multiplies<int32_t>()); dim_2 = std::accumulate(input_shape.cbegin() + axis, input_shape.cend(), 1, std::multiplies<int32_t>()); } bool IsValidSupportedNodeGroup(const std::vector<const Node*>& supported_node_partition) { if (supported_node_partition.size() == 1) { const auto* node = supported_node_partition[0]; const auto& op = node->OpType(); // It is not worth it to perform a single Reshape/Flatten/Identity operator // which is only copying the data in NNAPI // If this is the case, let it fall back if (op == "Reshape" || op == "Flatten" || op == "Identity") { return false; } } return true; } static bool IsInternalQuantizedNodeUnit(const NodeUnit& node_unit) { // First, ignore QDQ NodeUnit which is not internal quantized node if (node_unit.UnitType() == NodeUnit::Type::QDQGroup) return false; // These operators can use uint8 input without specific QLinear version of it // However, the mode has to be internal to the graph/partition (they cannot consume graph inputs) static const std::unordered_set<std::string> internal_quantized_op_types = { "Transpose", "Resize", "Concat", "MaxPool", }; const auto& node = node_unit.GetNode(); if (!Contains(internal_quantized_op_types, node.OpType())) return false; int32_t input_type; ORT_ENFORCE(GetType(*node.InputDefs()[0], input_type)); return input_type == ONNX_NAMESPACE::TensorProto_DataType_UINT8; } // We support some operators running using uint8 internally // These nodes cannot use 
a graph input as input since onnx graph input does not carry scale/zero point info bool IsInternalQuantizationSupported(const Node& node, const std::unordered_set<std::string>& node_outputs_in_group) { const auto& op_type = node.OpType(); // The node's input(s) have to be an output of node(s) within the group // If not, then this node is using graph/partition input(s) as input(s) const auto& input_defs = node.InputDefs(); // We only need to check input0 for all operators except "Concat" bool check_all_inputs = op_type == "Concat"; for (size_t i = 0; i < (check_all_inputs ? input_defs.size() : 1); i++) { if (!Contains(node_outputs_in_group, input_defs[i]->Name())) { LOGS_DEFAULT(VERBOSE) << "Node [" << node.Name() << "] type: [" << op_type << "] has input [" << input_defs[i]->Name() << "] does not support using graph input(quantized) as node input"; return false; } } return true; } bool IsNodeSupported(const NodeUnit& node_unit, const GraphViewer& graph_viewer, const OpSupportCheckParams& params) { const auto& op_support_checkers = GetOpSupportCheckers(); const auto op_support_checker_it = op_support_checkers.find(node_unit.OpType()); if (op_support_checker_it == op_support_checkers.end()) { return false; } const auto* op_support_checker = op_support_checker_it->second; return op_support_checker->IsOpSupported(graph_viewer.GetAllInitializedTensors(), node_unit, params); } bool IsNodeSupportedInGroup(const NodeUnit& node_unit, const GraphViewer& graph_viewer, const OpSupportCheckParams& params, const std::unordered_set<std::string>& node_outputs_in_group) { if (!IsNodeSupported(node_unit, graph_viewer, params)) return false; // We also want to check if the node is supported as an internal quantized node_unit if (IsInternalQuantizedNodeUnit(node_unit)) return IsInternalQuantizationSupported(node_unit.GetNode(), node_outputs_in_group); return true; } std::string Shape2String(const std::vector<uint32_t>& shape) { std::ostringstream os; os << "[ "; for (const auto& dim : 
shape) os << dim << " "; os << "]"; return os.str(); } bool CheckIsInitializer(const InitializedTensorSet& initializers, const NodeUnit& node_unit, const std::string& input_name, const char* input_description) { if (!Contains(initializers, input_name)) { LOGS_DEFAULT(VERBOSE) << input_description << " of " << node_unit.Name() << "of type [" << node_unit.OpType() << "] must be an initializer tensor"; return false; } return true; } } // namespace nnapi } // namespace onnxruntime
6,065
1,350
# MIT License # # Copyright (C) The Adversarial Robustness Toolbox (ART) Authors 2018 # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the "Software"), to deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the # Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
from __future__ import absolute_import, division, print_function, unicode_literals import logging import unittest import numpy as np from sklearn.tree import DecisionTreeRegressor from art.estimators.regression.scikitlearn import ScikitlearnDecisionTreeRegressor from art.estimators.regression.scikitlearn import ScikitlearnRegressor from tests.utils import TestBase, master_seed logger = logging.getLogger(__name__) class TestScikitlearnDecisionTreeRegressor(TestBase): @classmethod def setUpClass(cls): master_seed(seed=1234) super().setUpClass() cls.sklearn_model = DecisionTreeRegressor() cls.classifier = ScikitlearnDecisionTreeRegressor(model=cls.sklearn_model) cls.classifier.fit(x=cls.x_train_iris, y=cls.y_train_iris) def test_type(self): self.assertIsInstance(self.classifier, type(ScikitlearnRegressor(model=self.sklearn_model))) with self.assertRaises(TypeError): ScikitlearnDecisionTreeRegressor(model="sklearn_model") def test_predict(self): y_predicted = self.classifier.predict(self.x_test_iris[0:1]) y_expected = np.asarray([2.0]) np.testing.assert_array_almost_equal(y_predicted, y_expected, decimal=4) def test_save(self): self.classifier.save(filename="test.file", path=None) self.classifier.save(filename="test.file", path="./") def test_clone_for_refitting(self): _ = self.classifier.clone_for_refitting() if __name__ == "__main__": unittest.main()
893
1,375
// Copyright lowRISC contributors. // Licensed under the Apache License, Version 2.0, see LICENSE for details. // SPDX-License-Identifier: Apache-2.0 #ifndef OPENTITAN_SW_DEVICE_LIB_DIF_AUTOGEN_DIF_ALERT_HANDLER_AUTOGEN_H_ #define OPENTITAN_SW_DEVICE_LIB_DIF_AUTOGEN_DIF_ALERT_HANDLER_AUTOGEN_H_ // THIS FILE HAS BEEN GENERATED, DO NOT EDIT MANUALLY. COMMAND: // util/make_new_dif.py --mode=regen --only=autogen /** * @file * @brief <a href="/hw/ip/alert_handler/doc/">ALERT_HANDLER</a> Device Interface * Functions */ #include <stdbool.h> #include <stdint.h> #include "sw/device/lib/base/macros.h" #include "sw/device/lib/base/mmio.h" #include "sw/device/lib/dif/dif_base.h" #ifdef __cplusplus extern "C" { #endif // __cplusplus /** * A handle to alert_handler. * * This type should be treated as opaque by users. */ typedef struct dif_alert_handler { /** * The base address for the alert_handler hardware registers. */ mmio_region_t base_addr; } dif_alert_handler_t; /** * Creates a new handle for a(n) alert_handler peripheral. * * This function does not actuate the hardware. * * @param base_addr The MMIO base address of the alert_handler peripheral. * @param[out] alert_handler Out param for the initialized handle. * @return The result of the operation. */ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_init(mmio_region_t base_addr, dif_alert_handler_t *alert_handler); /** * A alert_handler interrupt request type. */ typedef enum dif_alert_handler_irq { /** * Interrupt state bit of Class A. Set by HW in case an alert within this * class triggered. Defaults true, write one to clear. */ kDifAlertHandlerIrqClassa = 0, /** * Interrupt state bit of Class B. Set by HW in case an alert within this * class triggered. Defaults true, write one to clear. */ kDifAlertHandlerIrqClassb = 1, /** * Interrupt state bit of Class C. Set by HW in case an alert within this * class triggered. Defaults true, write one to clear. 
*/ kDifAlertHandlerIrqClassc = 2, /** * Interrupt state bit of Class D. Set by HW in case an alert within this * class triggered. Defaults true, write one to clear. */ kDifAlertHandlerIrqClassd = 3, } dif_alert_handler_irq_t; /** * A snapshot of the state of the interrupts for this IP. * * This is an opaque type, to be used with the * `dif_alert_handler_irq_get_state()` function. */ typedef uint32_t dif_alert_handler_irq_state_snapshot_t; /** * Returns the state of all interrupts (i.e., pending or not) for this IP. * * @param alert_handler A alert_handler handle. * @param[out] snapshot Out-param for interrupt state snapshot. * @return The result of the operation. */ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_get_state( const dif_alert_handler_t *alert_handler, dif_alert_handler_irq_state_snapshot_t *snapshot); /** * Returns whether a particular interrupt is currently pending. * * @param alert_handler A alert_handler handle. * @param irq An interrupt request. * @param[out] is_pending Out-param for whether the interrupt is pending. * @return The result of the operation. */ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_is_pending( const dif_alert_handler_t *alert_handler, dif_alert_handler_irq_t irq, bool *is_pending); /** * Acknowledges all interrupts, indicating to the hardware that all * interrupts have been successfully serviced. * * @param alert_handler A alert_handler handle. * @return The result of the operation. */ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_acknowledge_all( const dif_alert_handler_t *alert_handler); /** * Acknowledges a particular interrupt, indicating to the hardware that it has * been successfully serviced. * * @param alert_handler A alert_handler handle. * @param irq An interrupt request. * @return The result of the operation. 
*/ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_acknowledge( const dif_alert_handler_t *alert_handler, dif_alert_handler_irq_t irq); /** * Forces a particular interrupt, causing it to be serviced as if hardware had * asserted it. * * @param alert_handler A alert_handler handle. * @param irq An interrupt request. * @return The result of the operation. */ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_force( const dif_alert_handler_t *alert_handler, dif_alert_handler_irq_t irq); /** * A snapshot of the enablement state of the interrupts for this IP. * * This is an opaque type, to be used with the * `dif_alert_handler_irq_disable_all()` and * `dif_alert_handler_irq_restore_all()` functions. */ typedef uint32_t dif_alert_handler_irq_enable_snapshot_t; /** * Checks whether a particular interrupt is currently enabled or disabled. * * @param alert_handler A alert_handler handle. * @param irq An interrupt request. * @param[out] state Out-param toggle state of the interrupt. * @return The result of the operation. */ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_get_enabled( const dif_alert_handler_t *alert_handler, dif_alert_handler_irq_t irq, dif_toggle_t *state); /** * Sets whether a particular interrupt is currently enabled or disabled. * * @param alert_handler A alert_handler handle. * @param irq An interrupt request. * @param state The new toggle state for the interrupt. * @return The result of the operation. */ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_set_enabled( const dif_alert_handler_t *alert_handler, dif_alert_handler_irq_t irq, dif_toggle_t state); /** * Disables all interrupts, optionally snapshotting all enable states for later * restoration. * * @param alert_handler A alert_handler handle. * @param[out] snapshot Out-param for the snapshot; may be `NULL`. * @return The result of the operation. 
*/ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_disable_all( const dif_alert_handler_t *alert_handler, dif_alert_handler_irq_enable_snapshot_t *snapshot); /** * Restores interrupts from the given (enable) snapshot. * * @param alert_handler A alert_handler handle. * @param snapshot A snapshot to restore from. * @return The result of the operation. */ OT_WARN_UNUSED_RESULT dif_result_t dif_alert_handler_irq_restore_all( const dif_alert_handler_t *alert_handler, const dif_alert_handler_irq_enable_snapshot_t *snapshot); #ifdef __cplusplus } // extern "C" #endif // __cplusplus #endif // OPENTITAN_SW_DEVICE_LIB_DIF_AUTOGEN_DIF_ALERT_HANDLER_AUTOGEN_H_
2,294
2,921
{ "name": "Wagerr", "symbol": "WWGR", "type": "ERC20", "decimals": 8, "description": "Tokenized version of the Wagerr", "website": "https://www.wagerr.com", "explorer": "https://etherscan.io/token/0xC237868a9c5729bdF3173dDDacaa336a0a5BB6e0", "status": "active", "id": "0xC237868a9c5729bdF3173dDDacaa336a0a5BB6e0" }
179
678
<gh_stars>100-1000 /** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/OfficeImport.framework/OfficeImport */ #import <OfficeImport/MFCocoaBrush.h> @class NSColorStub; __attribute__((visibility("hidden"))) @interface MFCocoaHatchBrush : MFCocoaBrush { @private NSColorStub *m_colour; // 4 = 0x4 int m_style; // 8 = 0x8 } + (id)hatchBrushWithColour:(id)colour :(int)arg2; // 0x20f08d - (id)initWithColour:(id)colour :(int)arg2; // 0x20f0cd - (void)dealloc; // 0x20f139 - (void)fillPath:(id)path :(id)arg2; // 0x20f135 @end
244
5,079
<reponame>yetsun/hue
#------------------------------------------------------------------------------
# Copyright 2018, Oracle and/or its affiliates. All rights reserved.
#------------------------------------------------------------------------------

#------------------------------------------------------------------------------
# AdvancedQueuingNotification.py
#   This script demonstrates using advanced queuing notification. Once this
# script is running, use another session to enqueue a few messages to the
# "BOOKS" queue. This is most easily accomplished by running the
# AdvancedQueuing sample.
#
# This script requires cx_Oracle 6.4 and higher.
#------------------------------------------------------------------------------

from __future__ import print_function

import cx_Oracle
import SampleEnv
# NOTE(review): `threading` appears unused in this sample -- confirm before
# removing (notification callbacks are delivered outside the main thread).
import threading
import time

# Flag cleared by the callback on a deregistration event; the polling loop at
# the bottom of the script uses it to decide when to exit.
registered = True

def callback(message):
    """Handle one queue notification.

    Prints the notification details; on a deregistration event (e.g. when the
    subscription's timeout elapses) it clears the module-level ``registered``
    flag so the main polling loop terminates.
    """
    global registered
    print("Message type:", message.type)
    if message.type == cx_Oracle.EVENT_DEREG:
        # Subscription has ended; stop the wait loop below.
        print("Deregistration has taken place...")
        registered = False
        return
    print("Queue name:", message.queueName)
    print("Consumer name:", message.consumerName)

# events=True is required for the connection to receive notifications.
connection = cx_Oracle.Connection(SampleEnv.MAIN_CONNECT_STRING, events = True)
# Subscribe to AQ notifications on the "BOOKS" queue; the subscription
# deregisters itself after 300 seconds.
sub = connection.subscribe(namespace = cx_Oracle.SUBSCR_NAMESPACE_AQ,
        name = "BOOKS", callback = callback, timeout = 300)
print("Subscription:", sub)
print("--> Connection:", sub.connection)
print("--> Callback:", sub.callback)
print("--> Namespace:", sub.namespace)
print("--> Protocol:", sub.protocol)
print("--> Timeout:", sub.timeout)

# Keep the process alive so notifications can be delivered; exits once the
# callback observes the deregistration event.
while registered:
    print("Waiting for notifications....")
    time.sleep(5)
445
5,169
<filename>Specs/RTDraggableBadge/0.1.1/RTDraggableBadge.podspec.json { "name": "RTDraggableBadge", "version": "0.1.1", "summary": "A drag to clear badge view", "description": "This project is inspired by Mobile QQ, a drag to clear badge view.", "homepage": "https://github.com/rickytan/RTDraggableBadge", "screenshots": "https://github.com/rickytan/RTDraggableBadge/raw/master/demo.gif", "license": "MIT", "authors": { "rickytan": "<EMAIL>" }, "source": { "git": "https://github.com/rickytan/RTDraggableBadge.git", "tag": "0.1.1" }, "platforms": { "ios": "7.0" }, "requires_arc": true, "source_files": "Pod/Classes/**/*", "resource_bundles": { "RTDraggableBadge": [ "Pod/Assets/*.png" ] } }
324
317
/* * Copyright (C) 2005-2020 Centre National d'Etudes Spatiales (CNES) * * This file is part of Orfeo Toolbox * * https://www.orfeo-toolbox.org/ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef otbTileDimensionTiledStreamingManager_hxx #define otbTileDimensionTiledStreamingManager_hxx #include "otbTileDimensionTiledStreamingManager.h" #include "otbMacro.h" #include "otbImageRegionSquareTileSplitter.h" namespace otb { template <class TImage> TileDimensionTiledStreamingManager<TImage>::TileDimensionTiledStreamingManager() : m_TileDimension(0) { } template <class TImage> TileDimensionTiledStreamingManager<TImage>::~TileDimensionTiledStreamingManager() { } template <class TImage> void TileDimensionTiledStreamingManager<TImage>::PrepareStreaming(itk::DataObject* /*input*/, const RegionType& region) { if (m_TileDimension < 16) { itkWarningMacro(<< "TileDimension inferior to 16 : using 16 as tile dimension") m_TileDimension = 16; } // Calculate number of split this->m_Splitter = otb::ImageRegionSquareTileSplitter<itkGetStaticConstMacro(ImageDimension)>::New(); unsigned int nbDesiredTiles = itk::Math::Ceil<unsigned int>(double(region.GetNumberOfPixels()) / (m_TileDimension * m_TileDimension)); this->m_ComputedNumberOfSplits = this->m_Splitter->GetNumberOfSplits(region, nbDesiredTiles); this->m_Region = region; } } // End namespace otb #endif
650
14,668
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "mojo/public/cpp/bindings/lib/handle_serialization.h"

#include "base/numerics/safe_conversions.h"
#include "mojo/public/cpp/bindings/lib/bindings_internal.h"
#include "mojo/public/cpp/bindings/lib/pending_receiver_state.h"

namespace mojo {
namespace internal {

// Encodes `handle` into `data` as an index into `message`'s handle table;
// invalid handles are encoded as kEncodedInvalidHandleValue. The index is
// read from handles.size() *before* the push_back so it names exactly the
// slot the handle is appended into.
void SerializeHandle(ScopedHandle handle, Message& message, Handle_Data& data) {
  if (!handle.is_valid()) {
    data.value = kEncodedInvalidHandleValue;
    return;
  }

  auto& handles = *message.mutable_handles();
  data.value = base::checked_cast<uint32_t>(handles.size());
  handles.push_back(std::move(handle));
}

// Serializes a message pipe handle together with an interface version number.
void SerializeInterfaceInfo(ScopedMessagePipeHandle handle,
                            uint32_t version,
                            Message& message,
                            Interface_Data& data) {
  SerializeHandle(ScopedHandle::From(std::move(handle)), message, data.handle);
  data.version = version;
}

// Same index-into-table scheme as SerializeHandle, but for associated
// endpoint handles, which live in a separate table on the message.
void SerializeAssociatedEndpoint(ScopedInterfaceEndpointHandle handle,
                                 Message& message,
                                 AssociatedEndpointHandle_Data& data) {
  if (!handle.is_valid()) {
    data.value = kEncodedInvalidHandleValue;
    return;
  }

  auto& handles = *message.mutable_associated_endpoint_handles();
  data.value = base::checked_cast<uint32_t>(handles.size());
  handles.push_back(std::move(handle));
}

// Serializes an associated endpoint handle together with an interface version.
void SerializeAssociatedInterfaceInfo(ScopedInterfaceEndpointHandle handle,
                                      uint32_t version,
                                      Message& message,
                                      AssociatedInterface_Data& data) {
  SerializeAssociatedEndpoint(std::move(handle), message, data.handle);
  data.version = version;
}

// Reverse of SerializeHandle: moves the handle out of the message's handle
// table slot named by `data`. Returns an invalid handle for the invalid
// encoding; the table slot is left in a moved-from (invalid) state.
ScopedHandle DeserializeHandle(const Handle_Data& data, Message& message) {
  if (!data.is_valid())
    return {};

  auto& handles = *message.mutable_handles();
  DCHECK_LT(data.value, handles.size());
  return std::move(handles[data.value]);
}

// Deserializes `data` into `receiver_state`, also propagating the message's
// connection group (when present) alongside the pipe.
void DeserializeHandleAsReceiver(const Handle_Data& data,
                                 Message& message,
                                 PendingReceiverState& receiver_state) {
  receiver_state.pipe = ScopedMessagePipeHandle::From(DeserializeHandle(data,
                                                                        message));
  if (message.receiver_connection_group())
    receiver_state.connection_group = *message.receiver_connection_group();
}

// Reverse of SerializeAssociatedEndpoint: moves the endpoint handle out of
// the message's associated-endpoint table slot named by `data`.
ScopedInterfaceEndpointHandle DeserializeAssociatedEndpointHandle(
    const AssociatedEndpointHandle_Data& data,
    Message& message) {
  if (!data.is_valid())
    return {};

  auto& handles = *message.mutable_associated_endpoint_handles();
  DCHECK_LT(data.value, handles.size());
  return std::move(handles[data.value]);
}

}  // namespace internal
}  // namespace mojo
1,149
362
<filename>test/src/mocks/mock_nothrowallocator.cpp #include "mock_nothrowallocator.h"
36
1,949
/*
 * Copyright (c) 2016-2016, <NAME>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 *
 *   Redistributions in binary form must reproduce the above copyright notice, this
 *   list of conditions and the following disclaimer in the documentation and/or
 *   other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef SQLPP11_INCONSISTENT_H
#define SQLPP11_INCONSISTENT_H

#include <utility>

namespace sqlpp
{
#if defined(__clang__) || defined(_MSC_VER)
  // On clang and MSVC the alias can forward the check result directly.
  template <typename Check>
  using inconsistent = Check;
#else
  // This version circumvents an ambiguity problem with gcc
  // See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=77449
  // The alias is SFINAE-disabled when Check is consistent_t, i.e. it only
  // yields a type for checks that actually report an inconsistency.
  template <typename Check>
  using inconsistent = typename std::enable_if<not std::is_same<consistent_t, Check>::value, Check>::type;
#endif
}  // namespace sqlpp

#endif
569
622
from runtime import *
'''
for loop tests
'''

def main():
	# Sum the elements of a list with a plain for-in loop.
	a = [1,2,3]
	y = 0
	for x in a:
		y += x
	assert( y==6 )

	# Build a string by concatenating while iterating over a list.
	z = ''
	arr = ['a', 'b', 'c']
	for v in arr:
		z += v
	assert( z == 'abc' )

	# Membership test on a list with `in`.
	b = False
	if 'a' in arr:
		b = True
	assert( b == True )

	# Iterate a string one character at a time via iter().
	s = 'hello world'
	z = ''
	for char in iter(s):
		z += char
	assert( z == 'hello world' )

	# Substring membership test on a string.
	b = False
	if 'hello' in s:
		b = True
	assert( b==True )

	print 'testing for loop over dict'
	# Iterating a dict (via iter()) yields its keys; ordering is not
	# guaranteed, so the assertions accept either key order.
	ob = {'a' : 'A', 'b' : 'B'}
	k = ''
	v = ''
	for key in iter(ob):
		k += key
		v += ob[key]
	print k
	print v
	assert(k=='ab' or k=='ba')
	assert(v=='AB' or v=='BA')

	# dict.items() yields (key, value) pairs suitable for tuple unpacking.
	keys = []
	values = []
	for x,y in ob.items():
		keys.append( x )
		values.append( y )
	assert( 'a' in keys )
	assert( 'A' in values )

	# Nested loops over two dicts: the inner loop body runs once per
	# inner item per outer item, so the counter ends at 2 * 2 == 4.
	ob2 = {'c':'C', 'd':'D'}
	e = 0
	arr = []
	for x,y in ob.items():
		arr.append(x)
		arr.append(y)
		for w,z in ob2.items():
			e += 1
			arr.append(w)
			arr.append(z)
	assert( e==4 )
	assert( 'a' in arr)
	assert( 'b' in arr)
	assert( 'A' in arr)
	assert( 'B' in arr)
	assert( 'c' in arr)
	assert( 'C' in arr)
	assert( 'd' in arr)
	assert( 'D' in arr)

main()
545