max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
1,311
// SPDX-FileCopyrightText: 2021 <NAME>
// SPDX-License-Identifier: MIT

#include <Jolt.h>

#include <Geometry/AABox.h>
#include <Geometry/OrientedBox.h>

namespace JPH {

// Separating-axis test (SAT) between this oriented box and an axis-aligned box.
// Returns true when the boxes intersect; inEpsilon guards the 9 cross-product
// axes against degenerate (near-parallel-edge) cases.
bool OrientedBox::Overlaps(const AABox &inBox, float inEpsilon) const
{
	// Taken from: Real Time Collision Detection - <NAME>
	// Chapter 4.4.1, page 103-105.
	// Note that the code is swapped around: A is the aabox and B is the oriented box (this saves us from having to invert the orientation of the oriented box)

	// Convert AABox to center / extent representation
	Vec3 a_center = inBox.GetCenter();
	Vec3 a_half_extents = inBox.GetExtent();

	// Compute rotation matrix expressing b in a's coordinate frame.
	// The translation column is shifted by a_center so that rot's 4th column
	// holds B's center relative to A's center.
	Mat44 rot(mOrientation.GetColumn4(0), mOrientation.GetColumn4(1), mOrientation.GetColumn4(2), mOrientation.GetColumn4(3) - Vec4(a_center, 0));

	// Compute common subexpressions. Add in an epsilon term to
	// counteract arithmetic errors when two edges are parallel and
	// their cross product is (near) null (see text for details)
	Vec3 epsilon = Vec3::sReplicate(inEpsilon);
	Vec3 abs_r[3] { rot.GetAxisX().Abs() + epsilon, rot.GetAxisY().Abs() + epsilon, rot.GetAxisZ().Abs() + epsilon };

	// Test axes L = A0, L = A1, L = A2
	float ra, rb;
	for (int i = 0; i < 3; i++)
	{
		ra = a_half_extents[i];
		rb = mHalfExtents[0] * abs_r[0][i] + mHalfExtents[1] * abs_r[1][i] + mHalfExtents[2] * abs_r[2][i];
		if (abs(rot(i, 3)) > ra + rb) return false;
	}

	// Test axes L = B0, L = B1, L = B2
	for (int i = 0; i < 3; i++)
	{
		ra = a_half_extents.Dot(abs_r[i]);
		rb = mHalfExtents[i];
		if (abs(rot.GetTranslation().Dot(rot.GetColumn3(i))) > ra + rb) return false;
	}

	// The remaining 9 tests project both boxes onto each cross product of an
	// A axis and a B axis (indices follow Ericson; abs_r is stored by column,
	// so abs_r[j][i] == |R[i][j]| in the book's notation).

	// Test axis L = A0 x B0
	ra = a_half_extents[1] * abs_r[0][2] + a_half_extents[2] * abs_r[0][1];
	rb = mHalfExtents[1] * abs_r[2][0] + mHalfExtents[2] * abs_r[1][0];
	if (abs(rot(2, 3) * rot(1, 0) - rot(1, 3) * rot(2, 0)) > ra + rb) return false;

	// Test axis L = A0 x B1
	ra = a_half_extents[1] * abs_r[1][2] + a_half_extents[2] * abs_r[1][1];
	rb = mHalfExtents[0] * abs_r[2][0] + mHalfExtents[2] * abs_r[0][0];
	if (abs(rot(2, 3) * rot(1, 1) - rot(1, 3) * rot(2, 1)) > ra + rb) return false;

	// Test axis L = A0 x B2
	ra = a_half_extents[1] * abs_r[2][2] + a_half_extents[2] * abs_r[2][1];
	rb = mHalfExtents[0] * abs_r[1][0] + mHalfExtents[1] * abs_r[0][0];
	if (abs(rot(2, 3) * rot(1, 2) - rot(1, 3) * rot(2, 2)) > ra + rb) return false;

	// Test axis L = A1 x B0
	ra = a_half_extents[0] * abs_r[0][2] + a_half_extents[2] * abs_r[0][0];
	rb = mHalfExtents[1] * abs_r[2][1] + mHalfExtents[2] * abs_r[1][1];
	if (abs(rot(0, 3) * rot(2, 0) - rot(2, 3) * rot(0, 0)) > ra + rb) return false;

	// Test axis L = A1 x B1
	ra = a_half_extents[0] * abs_r[1][2] + a_half_extents[2] * abs_r[1][0];
	rb = mHalfExtents[0] * abs_r[2][1] + mHalfExtents[2] * abs_r[0][1];
	if (abs(rot(0, 3) * rot(2, 1) - rot(2, 3) * rot(0, 1)) > ra + rb) return false;

	// Test axis L = A1 x B2
	ra = a_half_extents[0] * abs_r[2][2] + a_half_extents[2] * abs_r[2][0];
	rb = mHalfExtents[0] * abs_r[1][1] + mHalfExtents[1] * abs_r[0][1];
	if (abs(rot(0, 3) * rot(2, 2) - rot(2, 3) * rot(0, 2)) > ra + rb) return false;

	// Test axis L = A2 x B0
	ra = a_half_extents[0] * abs_r[0][1] + a_half_extents[1] * abs_r[0][0];
	rb = mHalfExtents[1] * abs_r[2][2] + mHalfExtents[2] * abs_r[1][2];
	if (abs(rot(1, 3) * rot(0, 0) - rot(0, 3) * rot(1, 0)) > ra + rb) return false;

	// Test axis L = A2 x B1
	ra = a_half_extents[0] * abs_r[1][1] + a_half_extents[1] * abs_r[1][0];
	rb = mHalfExtents[0] * abs_r[2][2] + mHalfExtents[2] * abs_r[0][2];
	if (abs(rot(1, 3) * rot(0, 1) - rot(0, 3) * rot(1, 1)) > ra + rb) return false;

	// Test axis L = A2 x B2
	ra = a_half_extents[0] * abs_r[2][1] + a_half_extents[1] * abs_r[2][0];
	rb = mHalfExtents[0] * abs_r[1][2] + mHalfExtents[1] * abs_r[0][2];
	if (abs(rot(1, 3) * rot(0, 2) - rot(0, 3) * rot(1, 2)) > ra + rb) return false;

	// Since no separating axis is found, the OBB and AAB must be intersecting
	return true;
}

// Separating-axis test (SAT) between two oriented boxes.
// Same 15-axis algorithm as above, but B is first expressed in A's frame via
// an inverse-rotation-translation multiply.
bool OrientedBox::Overlaps(const OrientedBox &inBox, float inEpsilon) const
{
	// Taken from: Real Time Collision Detection - <NAME>
	// Chapter 4.4.1, page 103-105.
	// Note that A is this, B is inBox

	// Compute rotation matrix expressing b in a's coordinate frame
	Mat44 rot = mOrientation.InversedRotationTranslation() * inBox.mOrientation;

	// Compute common subexpressions. Add in an epsilon term to
	// counteract arithmetic errors when two edges are parallel and
	// their cross product is (near) null (see text for details)
	Vec3 epsilon = Vec3::sReplicate(inEpsilon);
	Vec3 abs_r[3] { rot.GetAxisX().Abs() + epsilon, rot.GetAxisY().Abs() + epsilon, rot.GetAxisZ().Abs() + epsilon };

	// Test axes L = A0, L = A1, L = A2
	float ra, rb;
	for (int i = 0; i < 3; i++)
	{
		ra = mHalfExtents[i];
		rb = inBox.mHalfExtents[0] * abs_r[0][i] + inBox.mHalfExtents[1] * abs_r[1][i] + inBox.mHalfExtents[2] * abs_r[2][i];
		if (abs(rot(i, 3)) > ra + rb) return false;
	}

	// Test axes L = B0, L = B1, L = B2
	for (int i = 0; i < 3; i++)
	{
		ra = mHalfExtents.Dot(abs_r[i]);
		rb = inBox.mHalfExtents[i];
		if (abs(rot.GetTranslation().Dot(rot.GetColumn3(i))) > ra + rb) return false;
	}

	// Test axis L = A0 x B0
	ra = mHalfExtents[1] * abs_r[0][2] + mHalfExtents[2] * abs_r[0][1];
	rb = inBox.mHalfExtents[1] * abs_r[2][0] + inBox.mHalfExtents[2] * abs_r[1][0];
	if (abs(rot(2, 3) * rot(1, 0) - rot(1, 3) * rot(2, 0)) > ra + rb) return false;

	// Test axis L = A0 x B1
	ra = mHalfExtents[1] * abs_r[1][2] + mHalfExtents[2] * abs_r[1][1];
	rb = inBox.mHalfExtents[0] * abs_r[2][0] + inBox.mHalfExtents[2] * abs_r[0][0];
	if (abs(rot(2, 3) * rot(1, 1) - rot(1, 3) * rot(2, 1)) > ra + rb) return false;

	// Test axis L = A0 x B2
	ra = mHalfExtents[1] * abs_r[2][2] + mHalfExtents[2] * abs_r[2][1];
	rb = inBox.mHalfExtents[0] * abs_r[1][0] + inBox.mHalfExtents[1] * abs_r[0][0];
	if (abs(rot(2, 3) * rot(1, 2) - rot(1, 3) * rot(2, 2)) > ra + rb) return false;

	// Test axis L = A1 x B0
	ra = mHalfExtents[0] * abs_r[0][2] + mHalfExtents[2] * abs_r[0][0];
	rb = inBox.mHalfExtents[1] * abs_r[2][1] + inBox.mHalfExtents[2] * abs_r[1][1];
	if (abs(rot(0, 3) * rot(2, 0) - rot(2, 3) * rot(0, 0)) > ra + rb) return false;

	// Test axis L = A1 x B1
	ra = mHalfExtents[0] * abs_r[1][2] + mHalfExtents[2] * abs_r[1][0];
	rb = inBox.mHalfExtents[0] * abs_r[2][1] + inBox.mHalfExtents[2] * abs_r[0][1];
	if (abs(rot(0, 3) * rot(2, 1) - rot(2, 3) * rot(0, 1)) > ra + rb) return false;

	// Test axis L = A1 x B2
	ra = mHalfExtents[0] * abs_r[2][2] + mHalfExtents[2] * abs_r[2][0];
	rb = inBox.mHalfExtents[0] * abs_r[1][1] + inBox.mHalfExtents[1] * abs_r[0][1];
	if (abs(rot(0, 3) * rot(2, 2) - rot(2, 3) * rot(0, 2)) > ra + rb) return false;

	// Test axis L = A2 x B0
	ra = mHalfExtents[0] * abs_r[0][1] + mHalfExtents[1] * abs_r[0][0];
	rb = inBox.mHalfExtents[1] * abs_r[2][2] + inBox.mHalfExtents[2] * abs_r[1][2];
	if (abs(rot(1, 3) * rot(0, 0) - rot(0, 3) * rot(1, 0)) > ra + rb) return false;

	// Test axis L = A2 x B1
	ra = mHalfExtents[0] * abs_r[1][1] + mHalfExtents[1] * abs_r[1][0];
	rb = inBox.mHalfExtents[0] * abs_r[2][2] + inBox.mHalfExtents[2] * abs_r[0][2];
	if (abs(rot(1, 3) * rot(0, 1) - rot(0, 3) * rot(1, 1)) > ra + rb) return false;

	// Test axis L = A2 x B2
	ra = mHalfExtents[0] * abs_r[2][1] + mHalfExtents[1] * abs_r[2][0];
	rb = inBox.mHalfExtents[0] * abs_r[1][2] + inBox.mHalfExtents[1] * abs_r[0][2];
	if (abs(rot(1, 3) * rot(0, 2) - rot(0, 3) * rot(1, 2)) > ra + rb) return false;

	// Since no separating axis is found, the OBBs must be intersecting
	return true;
}

} // JPH
3,615
712
# Copyright (C) 2018 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """The reinitialization experiment for Lenet 300-100 trained on MNIST.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from lottery_ticket.foundations import save_restore from lottery_ticket.mnist_fc import constants from lottery_ticket.mnist_fc import train as train_mnist import numpy as np def train(output_dir, mnist_location=constants.MNIST_LOCATION, training_len=constants.TRAINING_LEN, masks=None, initialization_distribution=None, same_sign=None): """Perform the reinitialization experiment. Using the masks from a previous run of the lottery ticket experiment, train a new, randomly reinitialized network. At most one of masks_location and masks_dictionary should be set. If both are None, then no masks are used. At most one of same_sign_location and same_sign_dictionary should be set. If both are None, then the same sign initialization strategy is not used. Args: output_dir: The directory to which the output should be written. mnist_location: The path to the NPZ file containing MNIST. training_len: How long to train the network. masks: The masks, if any, used to prune weights. Masks can come in one of three forms: * A dictionary of numpy arrays. Each dictionary key is the name of the corresponding tensor that is to be masked out. Each value is a numpy array containing the masks (1 for including a weight, 0 for excluding). 
* The string name of a directory containing one file for each mask (in the form of foundations.save_restore). * None, meaning the network should not be pruned. initialization_distribution: The distribution from which weights are sampled If the argument is None, the weights are samples from the default distribution. If the argument is a string, it is treated as the name of a directory whose filenams are layer names and whose entries are one-dimensional numpy arrays of weights. The weights for each layer are randomly sampled from these arrays. If the argument is anything else, it is treated as a dictionary whose keys are layer names and whose values are numpy arrays as described above. same_sign: Whether to ensure each weight is initialized to the same sign as the weight in the original network. Only applies when initialization is not None. If this argument is not None, then it contains the previous network weights that are used to determine the signs to which the new network should be initialized. This argument can be provided as a dictionary or string path in the same fashion as masks. """ masks = save_restore.standardize(masks) prev_weights = save_restore.standardize(same_sign) if initialization_distribution is None: presets = None else: initialization_distribution = save_restore.maybe_restore( initialization_distribution) # The preset weights should be randomly sampled from the values of # initialization. They should be the same shape as the masks. presets = {} for k, mask in masks.items(): init = initialization_distribution[k] # Weights have the same sign as those in the original networks. if prev_weights: positive = np.random.choice(init[init > 0], mask.shape) negative = np.random.choice(init[init < 0], mask.shape) presets[k] = np.where(prev_weights > 0, positive, negative) # Weights are randomly sampled. 
else: presets[k] = np.random.choice(init, mask.shape) train_mnist.train( output_dir=output_dir, mnist_location=mnist_location, training_len=training_len, presets=presets, masks=masks)
1,333
352
// ========================================================================= // Copyright 2020 T-Mobile, US // // Licensed under the Apache License, Version 2.0 (the "License") // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // See the readme.txt file for additional language around disclaimer of warranties. // ========================================================================= package com.tmobile.cso.vault.api.utils; import com.google.common.collect.ImmutableMap; import com.tmobile.cso.vault.api.exception.LogMessage; import org.apache.http.client.HttpClient; import org.apache.http.conn.ssl.NoopHostnameVerifier; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.LaxRedirectStrategy; import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.ssl.TrustStrategy; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.springframework.stereotype.Component; import java.security.KeyManagementException; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; @Component public class HttpUtils { private Logger log = LogManager.getLogger(HttpUtils.class); public HttpUtils() { // Auto-generated constructor stub } /** * To get HttpClinet for AAD rest api calls. * @return */ public HttpClient getHttpClient() { HttpClient httpClient = null; try { httpClient = HttpClientBuilder.create().setSSLHostnameVerifier( NoopHostnameVerifier.INSTANCE). 
setSSLContext( new SSLContextBuilder().loadTrustMaterial(null,new TrustStrategy() { @Override public boolean isTrusted(X509Certificate[] arg0, String arg1) throws CertificateException { return true; } }).build() ).setRedirectStrategy(new LaxRedirectStrategy()).build(); } catch (KeyManagementException | NoSuchAlgorithmException | KeyStoreException e1) { log.error(JSONUtil.getJSON(ImmutableMap.<String, String>builder(). put(LogMessage.USER, ThreadLocalContext.getCurrentMap().get(LogMessage.USER)). put(LogMessage.ACTION, "getHttpClient"). put(LogMessage.MESSAGE, "Failed to initialize httpClient"). put(LogMessage.APIURL, ThreadLocalContext.getCurrentMap().get(LogMessage.APIURL)). build())); } return httpClient; } }
879
419
<gh_stars>100-1000 { "ExportFieldValues": "Feldwerte exportieren", "AllObjectsWillBeExported": "Alle Objekte des ausgewählten Templates werden exportiert.", "OnlyObjectsUnderneathFolderPrefix": "Alle Objekte des ausgewählten Templates inner- und unterhalb des Ordners ", "OnlyObjectsUnderneathFolderPostfix": " werden exportiert.", "Template": "Template", "Fields": "Felder", "NoFieldsFound": "Es wurden keine Felder gefunden.", "SingleLineField": " (Textfeld)", "MultiLineField": " (Mehrzeiliges Textfeld)", "NumberField": " (Zahlenfeld)", "OptionField": " (Auswahlfeld)", "YouHaveToSelectATemplate": "Du musst ein Template auswählen.", "Export": "Exportieren", "Cancel": "Abbrechen", "ErrorOccured": "Ein Fehler ist aufgetreten." }
354
1,431
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.hssf.record.chart;

import static org.apache.poi.util.GenericRecordUtil.getBitsAsString;
import static org.apache.poi.util.GenericRecordUtil.getEnumBitsAsString;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Supplier;

import org.apache.poi.hssf.record.HSSFRecordTypes;
import org.apache.poi.hssf.record.RecordInputStream;
import org.apache.poi.hssf.record.StandardRecord;
import org.apache.poi.util.BitField;
import org.apache.poi.util.BitFieldFactory;
import org.apache.poi.util.LittleEndianOutput;

/**
 * The text record is used to define text stored on a chart.
 */
public final class TextRecord extends StandardRecord {
    // BIFF record id for the TEXT record.
    public static final short sid = 0x1025;

    // Bit mask in field_11_options2: where a data label is placed.
    private static final BitField dataLabelPlacement = BitFieldFactory.getInstance(0x000F);

    // Bit masks in field_9_options1.
    private static final BitField autoColor = BitFieldFactory.getInstance(0x0001);
    private static final BitField showKey = BitFieldFactory.getInstance(0x0002);
    private static final BitField showValue = BitFieldFactory.getInstance(0x0004);
    private static final BitField vertical = BitFieldFactory.getInstance(0x0008);
    private static final BitField autoGeneratedText = BitFieldFactory.getInstance(0x0010);
    private static final BitField generated = BitFieldFactory.getInstance(0x0020);
    private static final BitField autoLabelDeleted = BitFieldFactory.getInstance(0x0040);
    private static final BitField autoBackground = BitFieldFactory.getInstance(0x0080);
    private static final BitField rotation = BitFieldFactory.getInstance(0x0700);
    private static final BitField showCategoryLabelAsPercentage = BitFieldFactory.getInstance(0x0800);
    private static final BitField showValueAsPercentage = BitFieldFactory.getInstance(0x1000);
    private static final BitField showBubbleSizes = BitFieldFactory.getInstance(0x2000);
    private static final BitField showLabel = BitFieldFactory.getInstance(0x4000);

    public static final byte HORIZONTAL_ALIGNMENT_LEFT = 1;
    public static final byte HORIZONTAL_ALIGNMENT_CENTER = 2;
    public static final byte HORIZONTAL_ALIGNMENT_BOTTOM = 3;
    public static final byte HORIZONTAL_ALIGNMENT_JUSTIFY = 4;
    public static final byte VERTICAL_ALIGNMENT_TOP = 1;
    public static final byte VERTICAL_ALIGNMENT_CENTER = 2;
    public static final byte VERTICAL_ALIGNMENT_BOTTOM = 3;
    public static final byte VERTICAL_ALIGNMENT_JUSTIFY = 4;
    public static final short DISPLAY_MODE_TRANSPARENT = 1;
    public static final short DISPLAY_MODE_OPAQUE = 2;
    public static final short ROTATION_NONE = 0;
    public static final short ROTATION_TOP_TO_BOTTOM = 1;
    public static final short ROTATION_ROTATED_90_DEGREES = 2;
    public static final short ROTATION_ROTATED_90_DEGREES_CLOCKWISE = 3;
    public static final short DATA_LABEL_PLACEMENT_CHART_DEPENDENT = 0;
    public static final short DATA_LABEL_PLACEMENT_OUTSIDE = 1;
    public static final short DATA_LABEL_PLACEMENT_INSIDE = 2;
    public static final short DATA_LABEL_PLACEMENT_CENTER = 3;
    public static final short DATA_LABEL_PLACEMENT_AXIS = 4;
    public static final short DATA_LABEL_PLACEMENT_ABOVE = 5;
    public static final short DATA_LABEL_PLACEMENT_BELOW = 6;
    public static final short DATA_LABEL_PLACEMENT_LEFT = 7;
    public static final short DATA_LABEL_PLACEMENT_RIGHT = 8;
    public static final short DATA_LABEL_PLACEMENT_AUTO = 9;
    public static final short DATA_LABEL_PLACEMENT_USER_MOVED = 10;

    // Record payload, in on-disk (serialization) order.
    private byte field_1_horizontalAlignment;
    private byte field_2_verticalAlignment;
    private short field_3_displayMode;
    private int field_4_rgbColor;
    private int field_5_x;
    private int field_6_y;
    private int field_7_width;
    private int field_8_height;
    private short field_9_options1;
    private short field_10_indexOfColorValue;
    private short field_11_options2;
    private short field_12_textRotation;

    public TextRecord() {}

    /** Copy constructor; duplicates all payload fields of {@code other}. */
    public TextRecord(TextRecord other) {
        super(other);
        field_1_horizontalAlignment = other.field_1_horizontalAlignment;
        field_2_verticalAlignment = other.field_2_verticalAlignment;
        field_3_displayMode = other.field_3_displayMode;
        field_4_rgbColor = other.field_4_rgbColor;
        field_5_x = other.field_5_x;
        field_6_y = other.field_6_y;
        field_7_width = other.field_7_width;
        field_8_height = other.field_8_height;
        field_9_options1 = other.field_9_options1;
        field_10_indexOfColorValue = other.field_10_indexOfColorValue;
        field_11_options2 = other.field_11_options2;
        field_12_textRotation = other.field_12_textRotation;
    }

    /** Reads the record payload from a BIFF stream, in serialization order. */
    public TextRecord(RecordInputStream in) {
        field_1_horizontalAlignment = in.readByte();
        field_2_verticalAlignment = in.readByte();
        field_3_displayMode = in.readShort();
        field_4_rgbColor = in.readInt();
        field_5_x = in.readInt();
        field_6_y = in.readInt();
        field_7_width = in.readInt();
        field_8_height = in.readInt();
        field_9_options1 = in.readShort();
        field_10_indexOfColorValue = in.readShort();
        field_11_options2 = in.readShort();
        field_12_textRotation = in.readShort();
    }

    /** Writes the record payload in the same order the stream constructor reads it. */
    public void serialize(LittleEndianOutput out) {
        out.writeByte(field_1_horizontalAlignment);
        out.writeByte(field_2_verticalAlignment);
        out.writeShort(field_3_displayMode);
        out.writeInt(field_4_rgbColor);
        out.writeInt(field_5_x);
        out.writeInt(field_6_y);
        out.writeInt(field_7_width);
        out.writeInt(field_8_height);
        out.writeShort(field_9_options1);
        out.writeShort(field_10_indexOfColorValue);
        out.writeShort(field_11_options2);
        out.writeShort(field_12_textRotation);
    }

    // Sum of the sizes of the twelve payload fields above.
    protected int getDataSize() {
        return 1 + 1 + 2 + 4 + 4 + 4 + 4 + 4 + 2 + 2 + 2 + 2;
    }

    public short getSid() {
        return sid;
    }

    @Override
    public TextRecord copy() {
        return new TextRecord(this);
    }

    /**
     * Get the horizontal alignment field for the Text record.
     *
     * @return One of
     *         HORIZONTAL_ALIGNMENT_LEFT
     *         HORIZONTAL_ALIGNMENT_CENTER
     *         HORIZONTAL_ALIGNMENT_BOTTOM
     *         HORIZONTAL_ALIGNMENT_JUSTIFY
     */
    public byte getHorizontalAlignment() {
        return field_1_horizontalAlignment;
    }

    /**
     * Set the horizontal alignment field for the Text record.
     *
     * @param field_1_horizontalAlignment
     *        One of
     *        HORIZONTAL_ALIGNMENT_LEFT
     *        HORIZONTAL_ALIGNMENT_CENTER
     *        HORIZONTAL_ALIGNMENT_BOTTOM
     *        HORIZONTAL_ALIGNMENT_JUSTIFY
     */
    public void setHorizontalAlignment(byte field_1_horizontalAlignment) {
        this.field_1_horizontalAlignment = field_1_horizontalAlignment;
    }

    /**
     * Get the vertical alignment field for the Text record.
     *
     * @return One of
     *         VERTICAL_ALIGNMENT_TOP
     *         VERTICAL_ALIGNMENT_CENTER
     *         VERTICAL_ALIGNMENT_BOTTOM
     *         VERTICAL_ALIGNMENT_JUSTIFY
     */
    public byte getVerticalAlignment() {
        return field_2_verticalAlignment;
    }

    /**
     * Set the vertical alignment field for the Text record.
     *
     * @param field_2_verticalAlignment
     *        One of
     *        VERTICAL_ALIGNMENT_TOP
     *        VERTICAL_ALIGNMENT_CENTER
     *        VERTICAL_ALIGNMENT_BOTTOM
     *        VERTICAL_ALIGNMENT_JUSTIFY
     */
    public void setVerticalAlignment(byte field_2_verticalAlignment) {
        this.field_2_verticalAlignment = field_2_verticalAlignment;
    }

    /**
     * Get the display mode field for the Text record.
     *
     * @return One of
     *         DISPLAY_MODE_TRANSPARENT
     *         DISPLAY_MODE_OPAQUE
     */
    public short getDisplayMode() {
        return field_3_displayMode;
    }

    /**
     * Set the display mode field for the Text record.
     *
     * @param field_3_displayMode
     *        One of
     *        DISPLAY_MODE_TRANSPARENT
     *        DISPLAY_MODE_OPAQUE
     */
    public void setDisplayMode(short field_3_displayMode) {
        this.field_3_displayMode = field_3_displayMode;
    }

    /** Get the rgbColor field for the Text record. */
    public int getRgbColor() {
        return field_4_rgbColor;
    }

    /** Set the rgbColor field for the Text record. */
    public void setRgbColor(int field_4_rgbColor) {
        this.field_4_rgbColor = field_4_rgbColor;
    }

    /** Get the x field for the Text record. */
    public int getX() {
        return field_5_x;
    }

    /** Set the x field for the Text record. */
    public void setX(int field_5_x) {
        this.field_5_x = field_5_x;
    }

    /** Get the y field for the Text record. */
    public int getY() {
        return field_6_y;
    }

    /** Set the y field for the Text record. */
    public void setY(int field_6_y) {
        this.field_6_y = field_6_y;
    }

    /** Get the width field for the Text record. */
    public int getWidth() {
        return field_7_width;
    }

    /** Set the width field for the Text record. */
    public void setWidth(int field_7_width) {
        this.field_7_width = field_7_width;
    }

    /** Get the height field for the Text record. */
    public int getHeight() {
        return field_8_height;
    }

    /** Set the height field for the Text record. */
    public void setHeight(int field_8_height) {
        this.field_8_height = field_8_height;
    }

    /** Get the options1 field for the Text record (raw bit flags; see the BitFields above). */
    public short getOptions1() {
        return field_9_options1;
    }

    /** Set the options1 field for the Text record (raw bit flags; see the BitFields above). */
    public void setOptions1(short field_9_options1) {
        this.field_9_options1 = field_9_options1;
    }

    /** Get the index of color value field for the Text record. */
    public short getIndexOfColorValue() {
        return field_10_indexOfColorValue;
    }

    /** Set the index of color value field for the Text record. */
    public void setIndexOfColorValue(short field_10_indexOfColorValue) {
        this.field_10_indexOfColorValue = field_10_indexOfColorValue;
    }

    /** Get the options2 field for the Text record (raw bit flags; holds data label placement). */
    public short getOptions2() {
        return field_11_options2;
    }

    /** Set the options2 field for the Text record (raw bit flags; holds data label placement). */
    public void setOptions2(short field_11_options2) {
        this.field_11_options2 = field_11_options2;
    }

    /** Get the text rotation field for the Text record. */
    public short getTextRotation() {
        return field_12_textRotation;
    }

    /** Set the text rotation field for the Text record. */
    public void setTextRotation(short field_12_textRotation) {
        this.field_12_textRotation = field_12_textRotation;
    }

    /**
     * Sets the auto color field value.
     * true = automatically selected colour, false = user-selected
     */
    public void setAutoColor(boolean value) {
        field_9_options1 = autoColor.setShortBoolean(field_9_options1, value);
    }

    /**
     * true = automatically selected colour, false = user-selected
     * @return the auto color field value.
     */
    public boolean isAutoColor() {
        return autoColor.isSet(field_9_options1);
    }

    /**
     * Sets the show key field value.
     * true = draw legend
     */
    public void setShowKey(boolean value) {
        field_9_options1 = showKey.setShortBoolean(field_9_options1, value);
    }

    /**
     * true = draw legend
     * @return the show key field value.
     */
    public boolean isShowKey() {
        return showKey.isSet(field_9_options1);
    }

    /**
     * Sets the show value field value.
     * false = text is category label
     */
    public void setShowValue(boolean value) {
        field_9_options1 = showValue.setShortBoolean(field_9_options1, value);
    }

    /**
     * false = text is category label
     * @return the show value field value.
     */
    public boolean isShowValue() {
        return showValue.isSet(field_9_options1);
    }

    /**
     * Sets the vertical field value.
     * true = text is vertical
     */
    public void setVertical(boolean value) {
        field_9_options1 = vertical.setShortBoolean(field_9_options1, value);
    }

    /**
     * true = text is vertical
     * @return the vertical field value.
     */
    public boolean isVertical() {
        return vertical.isSet(field_9_options1);
    }

    /** Sets the auto generated text field value. */
    public void setAutoGeneratedText(boolean value) {
        field_9_options1 = autoGeneratedText.setShortBoolean(field_9_options1, value);
    }

    /** @return the auto generated text field value. */
    public boolean isAutoGeneratedText() {
        return autoGeneratedText.isSet(field_9_options1);
    }

    /** Sets the generated field value. */
    public void setGenerated(boolean value) {
        field_9_options1 = generated.setShortBoolean(field_9_options1, value);
    }

    /** @return the generated field value. */
    public boolean isGenerated() {
        return generated.isSet(field_9_options1);
    }

    /** Sets the auto label deleted field value. */
    public void setAutoLabelDeleted(boolean value) {
        field_9_options1 = autoLabelDeleted.setShortBoolean(field_9_options1, value);
    }

    /** @return the auto label deleted field value. */
    public boolean isAutoLabelDeleted() {
        return autoLabelDeleted.isSet(field_9_options1);
    }

    /** Sets the auto background field value. */
    public void setAutoBackground(boolean value) {
        field_9_options1 = autoBackground.setShortBoolean(field_9_options1, value);
    }

    /** @return the auto background field value. */
    public boolean isAutoBackground() {
        return autoBackground.isSet(field_9_options1);
    }

    /** Sets the rotation field value (one of the ROTATION_* constants). */
    public void setRotation(short value) {
        field_9_options1 = rotation.setShortValue(field_9_options1, value);
    }

    /** @return the rotation field value (one of the ROTATION_* constants). */
    public short getRotation() {
        return rotation.getShortValue(field_9_options1);
    }

    /** Sets the show category label as percentage field value. */
    public void setShowCategoryLabelAsPercentage(boolean value) {
        field_9_options1 = showCategoryLabelAsPercentage.setShortBoolean(field_9_options1, value);
    }

    /** @return the show category label as percentage field value. */
    public boolean isShowCategoryLabelAsPercentage() {
        return showCategoryLabelAsPercentage.isSet(field_9_options1);
    }

    /** Sets the show value as percentage field value. */
    public void setShowValueAsPercentage(boolean value) {
        field_9_options1 = showValueAsPercentage.setShortBoolean(field_9_options1, value);
    }

    /** @return the show value as percentage field value. */
    public boolean isShowValueAsPercentage() {
        return showValueAsPercentage.isSet(field_9_options1);
    }

    /** Sets the show bubble sizes field value. */
    public void setShowBubbleSizes(boolean value) {
        field_9_options1 = showBubbleSizes.setShortBoolean(field_9_options1, value);
    }

    /** @return the show bubble sizes field value. */
    public boolean isShowBubbleSizes() {
        return showBubbleSizes.isSet(field_9_options1);
    }

    /** Sets the show label field value. */
    public void setShowLabel(boolean value) {
        field_9_options1 = showLabel.setShortBoolean(field_9_options1, value);
    }

    /** @return the show label field value. */
    public boolean isShowLabel() {
        return showLabel.isSet(field_9_options1);
    }

    /** Sets the data label placement field value (one of the DATA_LABEL_PLACEMENT_* constants; stored in options2). */
    public void setDataLabelPlacement(short value) {
        field_11_options2 = dataLabelPlacement.setShortValue(field_11_options2, value);
    }

    /** @return the data label placement field value (one of the DATA_LABEL_PLACEMENT_* constants). */
    public short getDataLabelPlacement() {
        return dataLabelPlacement.getShortValue(field_11_options2);
    }

    @Override
    public HSSFRecordTypes getGenericRecordType() {
        return HSSFRecordTypes.TEXT;
    }

    // Exposes every field (with enum/bit decoding) for generic record dumping.
    @Override
    public Map<String, Supplier<?>> getGenericProperties() {
        final Map<String, Supplier<?>> m = new LinkedHashMap<>();
        m.put("horizontalAlignment", getEnumBitsAsString(this::getHorizontalAlignment,
            new int[]{HORIZONTAL_ALIGNMENT_LEFT, HORIZONTAL_ALIGNMENT_CENTER, HORIZONTAL_ALIGNMENT_BOTTOM, HORIZONTAL_ALIGNMENT_JUSTIFY},
            new String[]{"LEFT","CENTER","BOTTOM","JUSTIFY"}));
        m.put("verticalAlignment", getEnumBitsAsString(this::getVerticalAlignment,
            new int[]{VERTICAL_ALIGNMENT_TOP, VERTICAL_ALIGNMENT_CENTER, VERTICAL_ALIGNMENT_BOTTOM, VERTICAL_ALIGNMENT_JUSTIFY},
            new String[]{"TOP", "CENTER", "BOTTOM", "JUSTIFY"}));
        m.put("displayMode", getEnumBitsAsString(this::getDisplayMode,
            new int[]{DISPLAY_MODE_TRANSPARENT,DISPLAY_MODE_OPAQUE},
            new String[]{"TRANSPARENT","OPAQUE"}));
        m.put("rgbColor", this::getRgbColor);
        m.put("x", this::getX);
        m.put("y", this::getY);
        m.put("width", this::getWidth);
        m.put("height", this::getHeight);
        m.put("options1", getBitsAsString(this::getOptions1,
            new BitField[]{autoColor, showKey, showValue, vertical, autoGeneratedText, generated, autoLabelDeleted, autoBackground, showCategoryLabelAsPercentage, showValueAsPercentage, showBubbleSizes, showLabel},
            new String[]{"AUTO_COLOR", "SHOW_KEY", "SHOW_VALUE", "VERTICAL", "AUTO_GENERATED_TEXT", "GENERATED", "AUTO_LABEL_DELETED", "AUTO_BACKGROUND", "SHOW_CATEGORY_LABEL_AS_PERCENTAGE", "SHOW_VALUE_AS_PERCENTAGE", "SHOW_BUBBLE_SIZES", "SHOW_LABEL"}));
        m.put("rotation", getEnumBitsAsString(this::getRotation,
            new int[]{ROTATION_NONE, ROTATION_TOP_TO_BOTTOM, ROTATION_ROTATED_90_DEGREES, ROTATION_ROTATED_90_DEGREES_CLOCKWISE},
            new String[]{"NONE", "TOP_TO_BOTTOM", "ROTATED_90_DEGREES", "ROTATED_90_DEGREES_CLOCKWISE"}));
        m.put("options2", this::getOptions2);
        m.put("dataLabelPlacement", getEnumBitsAsString(this::getDataLabelPlacement,
            new int[]{DATA_LABEL_PLACEMENT_CHART_DEPENDENT, DATA_LABEL_PLACEMENT_OUTSIDE, DATA_LABEL_PLACEMENT_INSIDE, DATA_LABEL_PLACEMENT_CENTER, DATA_LABEL_PLACEMENT_AXIS, DATA_LABEL_PLACEMENT_ABOVE, DATA_LABEL_PLACEMENT_BELOW, DATA_LABEL_PLACEMENT_LEFT, DATA_LABEL_PLACEMENT_RIGHT, DATA_LABEL_PLACEMENT_AUTO, DATA_LABEL_PLACEMENT_USER_MOVED},
            new String[]{"CHART_DEPENDENT", "OUTSIDE", "INSIDE", "CENTER", "AXIS", "ABOVE", "BELOW", "LEFT", "RIGHT", "AUTO", "USER_MOVED"}));
        m.put("indexOfColorValue", this::getIndexOfColorValue);
        m.put("textRotation", this::getTextRotation);
        return Collections.unmodifiableMap(m);
    }
}
9,062
343
#!python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Builds a GypTests object wrapping every unittest declared in
unittests.gypi."""

import os.path
import sys

# Resolve the Syzygy source root (one level above this script's directory)
# and the top-level gyp file that declares the test targets.
_SYZYGY_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
_SYZYGY_GYP = os.path.join(_SYZYGY_DIR, 'syzygy.gyp')

# Make the shared python utilities importable before pulling in gyp_tests.
_SCRIPT_DIR = os.path.join(_SYZYGY_DIR, 'py')
if _SCRIPT_DIR not in sys.path:
  sys.path.insert(0, _SCRIPT_DIR)

import test_utils.gyp_tests as gyp_tests  # pylint: disable=F0401


def MakeTest():
  """Returns a GypTests instance bound to the Syzygy top-level gyp file."""
  test = gyp_tests.GypTests(gyp_path=_SYZYGY_GYP)
  return test


if __name__ == '__main__':
  sys.exit(MakeTest().Main())
393
945
/*========================================================================= * * Copyright NumFOCUS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *=========================================================================*/ #ifndef itkFEMLinearSystemWrapper_h #define itkFEMLinearSystemWrapper_h #include "itkMacro.h" #include "itkFEMSolution.h" #include "itkFEMException.h" #include "ITKFEMExport.h" #include <vector> #include <typeinfo> #include <string> namespace itk { namespace fem { /** * \class LinearSystemWrapper * \brief Defines all functions required by Solver class to allocate, * assemble and solve a linear system of equation. * * Linear system is defined as A*x=B, where A is a square matrix and F * is a vector. Member functions are provided to access a specific element * within A and B. Objects of derived classes should make appropriate calls * to the numeric library in implementation of virtual functions to assemble * and solve the linear system. * * See comments for each virtual member for more information about how to * derive a new LinearSystemWrapper class. An example derived class * LinearSystemWrapperVNL is defined to use VNL sparse matrix representation * and solver. 
* * \sa Solver::SetLinearSystemWrapper * \ingroup ITKFEM */ class ITKFEM_EXPORT LinearSystemWrapper : public Solution { public: ITK_DISALLOW_COPY_AND_MOVE(LinearSystemWrapper); using Self = LinearSystemWrapper; using Superclass = Solution; using Pointer = Self *; using ConstPointer = const Self *; using ColumnArray = std::vector<unsigned int>; /** * Constructor for linear system, should perform any initialization that * is required by derived class. */ LinearSystemWrapper() = default; /* , m_PrimaryMatrixSetupFunction(0), m_PrimaryVectorSetupFunction(0), m_PrimarySolutionSetupFunction(0) {} */ /** * Virtual destructor should properly destroy the object and clean up any * memory allocated for matrix and vector storage. */ ~LinearSystemWrapper() override; /** * Clear all the data (matrices) inside the system, so that the system * is ready to solve another problem from scratch. */ virtual void Clean(); /** * Set the order of the system. All matrices will be of size NxN and * all vectors will be of size N * \param N order of the linear system */ void SetSystemOrder(unsigned int N) { m_Order = N; } /** * Get the order of the system */ unsigned int GetSystemOrder() const { return m_Order; } /** * Set Index of matrices used by the system * \param nMatrices Index of matrices used by system */ void SetNumberOfMatrices(unsigned int nMatrices) { m_NumberOfMatrices = nMatrices; } /* * Set the maximum number of entries permitted in a matrix * \param matrixIndex index of matrix to set value for * \param maxNonZeros maximum number of entries allowed in matrix * \note in general this function does nothing, however it may * redefined by the derived wrapper if necessary */ // virtual void SetMaximumNonZeroValuesInMatrix(unsigned int maxNonZeroValues) // = 0; /** * Get Index of matrices used by system */ unsigned int GetNumberOfMatrices() const { return m_NumberOfMatrices; } /** * Set Index of vectors used by the system * \param nVectors Index of vectors used by system */ void 
SetNumberOfVectors(unsigned int nVectors) { m_NumberOfVectors = nVectors; } /** * Get Index of vectors used by system */ unsigned int GetNumberOfVectors() const { return m_NumberOfVectors; } /** * Set Index of solutions used by the system * \param nSolutions Index of solutions used by system */ void SetNumberOfSolutions(unsigned int nSolutions) { m_NumberOfSolutions = nSolutions; } /** * Get Index of solutions used by system */ unsigned int GetNumberOfSolutions() const { return m_NumberOfSolutions; } /** * Initialization of the A matrix. First any existing data for matrix A * must be be destroyed, and then a new matrix is created in the memory. All * elements in A must be set to zero. * * \param matrixIndex index of matrix to initialize */ virtual void InitializeMatrix(unsigned int matrixIndex = 0) = 0; /** * Check to see if matrix is initialized * \param matrixIndex index of matrix to examine */ virtual bool IsMatrixInitialized(unsigned int matrixIndex = 0) = 0; /** * Free the memory from a matrix * \param matrixIndex index of matrix to destroy */ virtual void DestroyMatrix(unsigned int matrixIndex = 0) = 0; /** * Initialization of the a vector. First any existing data for vector B * must be destroyed, then new vector is created in the memory. All * elements in B must be set to zero. * */ virtual void InitializeVector(unsigned int vectorIndex = 0) = 0; /** * Check to see if vector is initialized * \param vectorIndex vector of index to examine */ virtual bool IsVectorInitialized(unsigned int vectorIndex = 0) = 0; /** * Free the memory from a vector * \param vectorIndex index of vector to destroy */ virtual void DestroyVector(unsigned int vectorIndex = 0) = 0; /** * Initialization of a solution vector. Existing memory must be destroyed * and the new solution vector is created in memory. All values should * be set to zero. 
* \param solutionIndex index of solution vector to initialize */ virtual void InitializeSolution(unsigned int solutionIndex = 0) = 0; /** * Check to see if solution vector is initialized * \param solutionIndex index of solution vector to examine */ virtual bool IsSolutionInitialized(unsigned int solutionIndex = 0) = 0; /** Free the memory from a solution vector * \param solutionIndex index of solution vector to destroy */ virtual void DestroySolution(unsigned int solutionIndex = 0) = 0; /** * Virtual function to get a value of a specific element of a matrix. * \param i row of the element * \param j column of the element * \param matrixIndex index of matrix to get value from */ virtual Float GetMatrixValue(unsigned int i, unsigned int j, unsigned int matrixIndex = 0) const = 0; /** * Virtual function to set a value of a specific element of the A matrix. * \param i row of the element * \param j column of the element * \param value new value of the element * \param matrixIndex index of matrix to set value in */ virtual void SetMatrixValue(unsigned int i, unsigned int j, Float value, unsigned int matrixIndex = 0) = 0; /** * Virtual function to add a value to a specific element of the A matrix. * \param i row of the element * \param j column of the element * \param value value to add to the existing element * \param matrixIndex index of matrix to add value to */ virtual void AddMatrixValue(unsigned int i, unsigned int j, Float value, unsigned int matrixIndex = 0) = 0; /** * Returns the column index (zero based) of the i-th non zero * (non allocated)element in a given row of A matrix. This function * is useful for optimizations when sparse matrices are used. Note * that the value of an element with returned column index may actually * be equal zero. * \param row Row number * \param cols Which element in that row. Can range from 0 to number of * elements allocated in a row. If this is out of range, the * function returns -1. 
* \param matrixIndex Index of matrix (defaults to 0) */ virtual void GetColumnsOfNonZeroMatrixElementsInRow(unsigned int row, ColumnArray & cols, unsigned int matrixIndex = 0); /** * Virtual function to get a value of a specific element of the B vector. * \param i row of the element * \param vectorIndex index of vector to get value from */ virtual Float GetVectorValue(unsigned int i, unsigned int vectorIndex = 0) const = 0; /** * Virtual function to set a value of a specific element of the B vector. * \param i row of the element * \param value new value of the element * \param vectorIndex index of vector to set value in */ virtual void SetVectorValue(unsigned int i, Float value, unsigned int vectorIndex = 0) = 0; /** * Virtual function to add a value to a specific element of the B vector. * \param i row of the element * \param value value to add to the existing element * \param vectorIndex index of vector to add value to */ virtual void AddVectorValue(unsigned int i, Float value, unsigned int vectorIndex = 0) = 0; /** * Virtual function to set a value of specific element of the solution * vector. * \param i element Index in solution vector * \param value new value of the element * \param solutionIndex index of solution vector to set value in */ virtual void SetSolutionValue(unsigned int i, Float value, unsigned int solutionIndex = 0) = 0; /** * Virtual function to add a value of specific element of the solution * vector. * \param i element Index in solution vector * \param value new value of the element * \param solutionIndex index of solution vector to add value to */ virtual void AddSolutionValue(unsigned int i, Float value, unsigned int solutionIndex = 0) = 0; /** * Solves the linear system and creates the solution vector, which can later * be accessed via GetSolutionValue(i,SolutionIndex) member function. Here all the major processing is * done with calls to external numeric library. 
* \note This function can only be called after the linear system was * properly assembled. */ virtual void Solve() = 0; /** * Swaps access indices of any 2 matrices in the linear system * \param matrixIndex1 index of a matrix to swap * \param matrixIndex2 index of matrix to swap with */ virtual void SwapMatrices(unsigned int matrixIndex1, unsigned int matrixIndex2) = 0; /** * Copies the content of source matrix to destination matrix. Any existing * data in destination matrix is overwritten. * * \param matrixIndex1 index of a matrix that will be copied * \param matrixIndex2 index of matrix to copy to */ virtual void CopyMatrix(unsigned int matrixIndex1, unsigned int matrixIndex2); /** * Swaps access indices of any 2 vectors in the linear system * \param vectorIndex1 index of a vector to swap * \param vectorIndex2 index of vector to swap with */ virtual void SwapVectors(unsigned int vectorIndex1, unsigned int vectorIndex2) = 0; /** * Swaps access indices of any 2 solution vectors in the linear system * \param solutionIndex1 index of a solution vector to swap * \param solutionIndex2 index of solution vector to swap with */ virtual void SwapSolutions(unsigned int solutionIndex1, unsigned int solutionIndex2) = 0; /** * Multiplies all elements of a matrix by a scalar * \param scale scalar to multiply all matrix values by * \param matrixIndex index of matrix to modify */ virtual void ScaleMatrix(Float scale, unsigned int matrixIndex = 0); /** * Multiplies all elements of a vector by a scalar * \param scale scalar to multiply all vector values by * \param vectorIndex index of vector to modify */ void ScaleVector(Float scale, unsigned int vectorIndex = 0); /** * Multiplies all elements of a solution by a scalar * \param scale scalar to multiply all solution values by * \param solutionIndex index of solution to modify */ void ScaleSolution(Float scale, unsigned int solutionIndex = 0); /** * Perform a matrix*matrix operation and store the result in the linear system * \param 
leftMatrixIndex index of left matrix * \param rightMatrixIndex index of right matrix * \param resultMatrixIndex index of matrix where solution is stored */ virtual void MultiplyMatrixMatrix(unsigned int resultMatrixIndex, unsigned int leftMatrixIndex, unsigned int rightMatrixIndex) = 0; /** * Adds two matrices storing the result in the first matrix. * * \param matrixIndex1 index of a matrix to add the other matrix to * \param matrixIndex2 index of matrix to add */ virtual void AddMatrixMatrix(unsigned int matrixIndex1, unsigned int matrixIndex2); /** * Adds two vectors storing the result in the first vector. * * \param vectorIndex1 index of a vector to add the other vector to * \param vectorIndex2 index of vector to add */ virtual void AddVectorVector(unsigned int vectorIndex1, unsigned int vectorIndex2); /** * Perform a matrix*vector operation and store the result in the linear system * \param matrixIndex index of matrix to multiply * \param vectorIndex index of vector to multiply * \param resultVectorIndex index of vector where result is store */ virtual void MultiplyMatrixVector(unsigned int resultVectorIndex, unsigned int matrixIndex, unsigned int vectorIndex); /** * Perform a matrix*solution operation and store the result in the linear system * \param matrixIndex index of matrix to multiply * \param solutionIndex index of solution to multiply * \param resultVectorIndex index of vector where result is store */ virtual void MultiplyMatrixSolution(unsigned int resultVectorIndex, unsigned int matrixIndex, unsigned int solutionIndex); /** * Copy a solution vector to a vector * \param solutionIndex index of solution vector to copy * \param vectorIndex index of vector to copy solution to */ virtual void CopySolution2Vector(unsigned int solutionIndex, unsigned int vectorIndex) = 0; /** * Copy a vector to a solution vector * \param vectorIndex index of a vector to copy * \param solutionIndex index of a solution to copy the solution to */ virtual void 
CopyVector2Solution(unsigned int vectorIndex, unsigned int solutionIndex) = 0; /** * Copy a vector * \param vectorSource index of a vector to copy * \param vectorDestination index to copy the vector to */ virtual void CopyVector(unsigned int vectorSource, unsigned int vectorDestination); /** * Remove all zeros from a matrix * \param matrixIndex index of matrix to remove zeros from * \param tempMatrixIndex index of matrix to use for temp storage space * \note an extra matrix must be allocated by the solver in order to use this method */ virtual void OptimizeMatrixStorage(unsigned int matrixIndex, unsigned int tempMatrixIndex); /** * Reorder the Degrees of Freedom in order to reduce bandwidth of matrix * \param matrixIndex index of matrix to examine * \param newNumbering vector of new degree of freedom ordering */ virtual void ReverseCuthillMckeeOrdering(ColumnArray & newNumbering, unsigned int matrixIndex = 0); protected: /** Order of linear system */ unsigned int m_Order{ 0 }; /** * Number of matrices used by system */ unsigned int m_NumberOfMatrices{ 1 }; /** * Number of vectors used by system */ unsigned int m_NumberOfVectors{ 1 }; /** * Number of solutions used by system */ unsigned int m_NumberOfSolutions{ 1 }; /* * Function used to prepare primary matrix for numerical solving */ // void (*m_PrimaryMatrixSetupFunction)(LinearSystemWrapper *lsw); /* * Function used to prepare primary vector for numerical solving */ /* void (*m_PrimaryVectorSetupFunction)(LinearSystemWrapper *lsw);*/ /* * Function used to prepare primary matrix for numerical solving */ /* void (*m_PrimarySolutionSetupFunction)(LinearSystemWrapper *lsw); */ private: /** * matrix reordering utility */ void CuthillMckeeOrdering(ColumnArray & newNumbering, int startingRow, unsigned int matrixIndex = 0); void FollowConnectionsCuthillMckeeOrdering(unsigned int rowNumber, ColumnArray & rowDegree, ColumnArray & newNumbering, unsigned int nextRowNumber, unsigned int matrixIndex = 0); }; class 
ITK_ABI_EXPORT FEMExceptionLinearSystem : public FEMException { public: /** * Constructor. In order to construct this exception object, four parameters * must be provided: file, lineNumber, location and a detailed description * of the exception. */ FEMExceptionLinearSystem(const char * file, unsigned int lineNumber, std::string location, std::string moreDescription); /** Virtual destructor needed for subclasses. Has to have empty throw(). */ ~FEMExceptionLinearSystem() noexcept override; /** Type related information. */ itkTypeMacro(FEMExceptionLinearSystem, FEMException); }; class ITK_ABI_EXPORT FEMExceptionLinearSystemBounds : public FEMException { public: /** * Constructor. In order to construct this exception object, five parameters * must be provided: file, lineNumber, location and a detailed description * of the exception, and the invalid index */ FEMExceptionLinearSystemBounds(const char * file, unsigned int lineNumber, std::string location, std::string moreDescription, unsigned int index1); /** * Constructor. In order to construct this exception object, six parameters * must be provided: file, lineNumber, location and a detailed description * of the exception, the first index, and the second index */ FEMExceptionLinearSystemBounds(const char * file, unsigned int lineNumber, std::string location, std::string moreDescription, unsigned int index1, unsigned int index2); /** Virtual destructor needed for subclasses. Has to have empty throw(). */ ~FEMExceptionLinearSystemBounds() noexcept override; /** Type related information. */ itkTypeMacro(FEMExceptionLinearSystem, FEMException); }; } // end namespace fem } // end namespace itk #endif // itkFEMLinearSystemWrapper_h
5,755
778
<filename>modUtil/src/main/java/org/aion/util/InternalTransactionUtil.java<gh_stars>100-1000 package org.aion.util; import java.util.ArrayList; import java.util.List; import org.aion.types.InternalTransaction; public class InternalTransactionUtil { /** * Method that creates an identical copy of the original InternalTransaction except it is marked * as REJECTED * * @param original The Internal Transaction we were given. * @return The new Rejected Transaction instance. */ public static InternalTransaction createRejectedTransaction(InternalTransaction original) { if (original.isCreate) { return InternalTransaction.contractCreateTransaction( InternalTransaction.RejectedStatus.REJECTED, original.sender, original.senderNonce, original.value, original.copyOfData(), original.energyLimit, original.energyPrice); } else { return InternalTransaction.contractCallTransaction( InternalTransaction.RejectedStatus.REJECTED, original.sender, original.destination, original.senderNonce, original.value, original.copyOfData(), original.energyLimit, original.energyPrice); } } public static List<InternalTransaction> createRejectedTransactionList( List<InternalTransaction> transactions) { List<InternalTransaction> rejectedInternalTransactions = new ArrayList<>(); for (InternalTransaction transaction : transactions) { rejectedInternalTransactions.add( InternalTransactionUtil.createRejectedTransaction(transaction)); } return rejectedInternalTransactions; } }
812
7,482
/*
 * Copyright (c) 2020-2021, Bluetrum Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date           Author       Notes
 * 2021-01-07     greedyhao    first version
 */

/* Software (bit-banged) I2C driver interface for the AB32 BSP, built on
 * RT-Thread's rt_i2c_bit_ops framework. */

#ifndef __DRV_I2C__
#define __DRV_I2C__

#include <rtthread.h>
#include <rthw.h>
#include <rtdevice.h>

/* ab32 config class */
/* Static pin/bus description for one software I2C bus. */
struct ab32_soft_i2c_config
{
    rt_uint8_t scl;        /* SCL pin number */
    rt_uint8_t sda;        /* SDA pin number */
    rt_uint8_t sda_mode;   /* NOTE(review): not initialized by the *_BUS_CONFIG
                              macros below — presumably set elsewhere; verify */
    rt_uint8_t scl_mode;   /* NOTE(review): same as sda_mode */
    const char *bus_name;  /* RT-Thread device name, e.g. "i2c1" */
};

/* ab32 i2c dirver class */
/* Runtime state: bit-bang ops plus the registered bus device. */
struct ab32_i2c
{
    struct rt_i2c_bit_ops ops;
    struct rt_i2c_bus_device i2c2_bus;
};

/* Per-bus initializers; each fills scl/sda pins and the bus name from
 * the board configuration (BSP_I2Cx_*_PIN). */
#ifdef BSP_USING_I2C1
#define I2C1_BUS_CONFIG                         \
    {                                           \
        .scl = BSP_I2C1_SCL_PIN,                \
        .sda = BSP_I2C1_SDA_PIN,                \
        .bus_name = "i2c1",                     \
    }
#endif

#ifdef BSP_USING_I2C2
#define I2C2_BUS_CONFIG                         \
    {                                           \
        .scl = BSP_I2C2_SCL_PIN,                \
        .sda = BSP_I2C2_SDA_PIN,                \
        .bus_name = "i2c2",                     \
    }
#endif

#ifdef BSP_USING_I2C3
#define I2C3_BUS_CONFIG                         \
    {                                           \
        .scl = BSP_I2C3_SCL_PIN,                \
        .sda = BSP_I2C3_SDA_PIN,                \
        .bus_name = "i2c3",                     \
    }
#endif

#ifdef BSP_USING_I2C4
#define I2C4_BUS_CONFIG                         \
    {                                           \
        .scl = BSP_I2C4_SCL_PIN,                \
        .sda = BSP_I2C4_SDA_PIN,                \
        .bus_name = "i2c4",                     \
    }
#endif

/* Registers all configured software I2C buses with RT-Thread. */
int rt_hw_i2c_init(void);

#endif
1,421
794
package github.tornaco.thanox.android.server.patch.framework;

/**
 * Entry point implemented by Thanox hook modules; {@link #installHooks(boolean)}
 * is invoked to install the module's hooks.
 */
public interface IThanoxHook {

    /**
     * Installs this module's hooks.
     *
     * @param isSystemServer {@code true} when invoked inside the system_server
     *                       process, {@code false} for app processes
     */
    void installHooks(boolean isSystemServer);
}
46
3,667
<reponame>jsoberg/andOTP /* * Copyright (C) 2017-2020 <NAME> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package org.shadowice.flocke.andotp.Utilities; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.net.Uri; import android.widget.Toast; import com.google.zxing.BarcodeFormat; import com.google.zxing.BinaryBitmap; import com.google.zxing.ChecksumException; import com.google.zxing.DecodeHintType; import com.google.zxing.FormatException; import com.google.zxing.LuminanceSource; import com.google.zxing.NotFoundException; import com.google.zxing.RGBLuminanceSource; import com.google.zxing.ReaderException; import com.google.zxing.Result; import com.google.zxing.common.HybridBinarizer; import com.google.zxing.qrcode.QRCodeReader; import org.shadowice.flocke.andotp.R; import java.io.IOException; import java.util.EnumMap; import java.util.Map; import java.util.Vector; public class ScanQRCodeFromFile { private static final Map<DecodeHintType, Object> HINTS; private static final Map<DecodeHintType, Object> HINTS_HARDER; static { Vector<BarcodeFormat> barcodeFormats = new Vector<>(); barcodeFormats.add(BarcodeFormat.QR_CODE); HINTS = new EnumMap<>(DecodeHintType.class); HINTS.put(DecodeHintType.POSSIBLE_FORMATS, barcodeFormats); HINTS_HARDER = new EnumMap<>(HINTS); HINTS_HARDER.put(DecodeHintType.TRY_HARDER, Boolean.TRUE); } public static String scanQRImage(Context context, Uri uri) { //Check if external storage is accessible if (!Tools.isExternalStorageReadable()) { Toast.makeText(context, R.string.backup_toast_storage_not_accessible, Toast.LENGTH_LONG).show(); return null; } //Get image in bytes byte[] imageInBytes; try { imageInBytes = StorageAccessHelper.loadFile(context, uri); } catch (IOException e) { e.printStackTrace(); Toast.makeText(context, R.string.toast_file_load_error, Toast.LENGTH_LONG).show(); return null; } Bitmap bMap = BitmapFactory.decodeByteArray(imageInBytes, 0, imageInBytes.length); String contents = null; int[] intArray = new int[bMap.getWidth() * bMap.getHeight()]; 
bMap.getPixels(intArray, 0, bMap.getWidth(), 0, 0, bMap.getWidth(), bMap.getHeight()); LuminanceSource source = new RGBLuminanceSource(bMap.getWidth(), bMap.getHeight(), intArray); BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source)); Result result = null; QRCodeReader reader = new QRCodeReader(); ReaderException savedException = null; try { //Try finding QR code result = reader.decode(bitmap, HINTS); contents = result.getText(); } catch (ReaderException re) { savedException = re; } if (contents == null) { try { //Try finding QR code really hard result = reader.decode(bitmap, HINTS_HARDER); contents = result.getText(); } catch (ReaderException re) { savedException = re; } } if (contents == null) { try { throw savedException == null ? NotFoundException.getNotFoundInstance() : savedException; } catch (ChecksumException e) { e.printStackTrace(); Toast.makeText(context, R.string.toast_qr_checksum_exception, Toast.LENGTH_LONG).show(); } catch (FormatException e) { e.printStackTrace(); Toast.makeText(context, R.string.toast_qr_format_error, Toast.LENGTH_LONG).show(); } catch (ReaderException e) { // Including NotFoundException e.printStackTrace(); Toast.makeText(context, R.string.toast_qr_error, Toast.LENGTH_LONG).show(); } } //Return QR code (if found) return contents; } }
2,036
739
#include "Crinkler.h"
#include "../Compressor/Compressor.h"
#include "Fix.h"

#include <set>
#include <ctime>
#include <ppl.h>

#include "HunkList.h"
#include "Hunk.h"
#include "CoffObjectLoader.h"
#include "CoffLibraryLoader.h"
#include "ImportHandler.h"
#include "Log.h"
#include "HeuristicHunkSorter.h"
#include "ExplicitHunkSorter.h"
#include "EmpiricalHunkSorter.h"
#include "misc.h"
#include "data.h"
#include "Symbol.h"
#include "HtmlReport.h"
#include "NameMangling.h"
#include "MemoryFile.h"

using namespace std;

// Returns the largest prime strictly smaller than n (trial division).
// The `goto in` restarts the scan at the next smaller odd candidate
// whenever a divisor is found; `n / i * i == n` is the truncating-division
// divisibility test.
static int PreviousPrime(int n) {
in:
	n = (n - 2) | 1;
	// BUGFIX: bound must be i*i <= n, not i*i < n; otherwise odd squares of
	// primes (9, 25, 49, ...) are never tested against their root and are
	// wrongly returned as prime (e.g. PreviousPrime(11) returned 9).
	for (int i = 3; i * i <= n; i += 2) {
		if (n / i * i == n) goto in;
	}
	return n;
}

// Recursively prints one line per compression-report record, indenting by
// record type (section / old section / public / local label).
static void VerboseLabels(CompressionReportRecord* csr) {
	if(csr->type & RECORD_ROOT) {
		printf("\nlabel name pos comp-pos size compsize");
	} else {
		string strippedName = StripCrinklerSymbolPrefix(csr->name.c_str());
		if(csr->type & RECORD_SECTION)
			printf("\n%-38.38s", strippedName.c_str());
		else if(csr->type & RECORD_OLD_SECTION)
			printf(" %-36.36s", strippedName.c_str());
		else if(csr->type & RECORD_PUBLIC)
			printf(" %-34.34s", strippedName.c_str());
		else
			printf(" %-32.32s", strippedName.c_str());

		// Compressed position/size are stored in fixed-point bits; divide by
		// BIT_PRECISION*8 to report bytes.
		if(csr->compressedPos >= 0)
			printf(" %9d %8.2f %9d %8.2f\n", csr->pos, csr->compressedPos / (BIT_PRECISION *8.0f), csr->size, csr->compressedSize / (BIT_PRECISION *8.0f));
		else
			printf(" %9d %9d\n", csr->pos, csr->size);
	}

	for(CompressionReportRecord* record : csr->children)
		VerboseLabels(record);
}

// Compressor progress callback: forwards (n, max) to the ProgressBar passed
// through the opaque userData pointer.
static void ProgressUpdateCallback(void* userData, int n, int max) {
	ProgressBar* progressBar = (ProgressBar*)userData;
	progressBar->Update(n, max);
}

// Common fatal error for all "this is not a Crinkler executable" cases.
static void NotCrinklerFileError() {
	Log::Error("", "Input file is not a Crinkler compressed executable");
}

Crinkler::Crinkler():
	m_subsystem(SUBSYSTEM_WINDOWS),
	m_hashsize(100*1024*1024),
	m_compressionType(COMPRESSION_FAST),
	m_reuseType(REUSE_OFF),
	m_useSafeImporting(true),
	m_hashtries(0),
	m_hunktries(0),
	m_printFlags(0),
	m_showProgressBar(false),
	m_useTinyHeader(false),
	m_useTinyImport(false),
	m_summaryFilename(""),
	m_truncateFloats(false),
	m_truncateBits(64),
	m_overrideAlignments(false),
	m_unalignCode(false),
	m_alignmentBits(0),
	m_runInitializers(1),
	m_largeAddressAware(0),
	m_saturate(0),
	m_stripExports(false)
{
	InitCompressor();

	// Start from the instant 4k model set; later estimation passes may replace these.
	m_modellist1 = InstantModels4k();
	m_modellist2 = InstantModels4k();
}

Crinkler::~Crinkler() {
}

// Renames the DLL of every import hunk whose (lower-cased) DLL name appears
// in m_replaceDlls, then warns about replacement entries that matched nothing.
void Crinkler::ReplaceDlls(HunkList& hunklist) {
	set<string> usedDlls;
	// Replace DLL
	for(int i = 0; i < hunklist.GetNumHunks(); i++) {
		Hunk* hunk = hunklist[i];
		if(hunk->GetFlags() & HUNK_IS_IMPORT) {
			map<string, string>::iterator it = m_replaceDlls.find(ToLower(hunk->GetImportDll()));
			if(it != m_replaceDlls.end()) {
				hunk->SetImportDll(it->second.c_str());
				usedDlls.insert(it->first);
			}
		}
	}

	// Warn about unused replace DLLs
	for(const auto& p : m_replaceDlls) {
		if(usedDlls.find(p.first) == usedDlls.end()) {
			Log::Warning("", "No functions were imported from replaced dll '%s'", p.first.c_str());
		}
	}
}

// Forces every hunk's alignment to m_alignmentBits (set by the
// m_overrideAlignments option).
void Crinkler::OverrideAlignments(HunkList& hunklist) {
	for(int i = 0; i < hunklist.GetNumHunks(); i++) {
		Hunk* hunk = hunklist[i];
		hunk->OverrideAlignment(m_alignmentBits);
	}
}

// Loads an object/library file from disk and appends its hunks to the pool.
// Errors out (via Log::Error) if the loader does not recognize the format.
void Crinkler::Load(const char* filename) {
	HunkList* hunkList = m_hunkLoader.LoadFromFile(filename);
	if(hunkList) {
		m_hunkPool.Append(hunkList);
		delete hunkList;
	} else {
		Log::Error(filename, "Unsupported file type");
	}
}

// Loads an object from an in-memory buffer (e.g. an embedded resource) and
// appends its hunks to the pool. `module` is the display name for the hunks.
void Crinkler::Load(const char* data, int size, const char* module) {
	HunkList* hunkList = m_hunkLoader.Load(data, size, module);
	m_hunkPool.Append(hunkList);
	delete hunkList;
}

// Adds the embedded runtime entry-point object and one import hunk + call
// stub for every export of msvcrt, marking those hunks as library hunks.
void Crinkler::AddRuntimeLibrary() {
	// Add minimal console entry point
	HunkList* runtime = m_hunkLoader.Load(runtimeObj, int(runtimeObj_end - runtimeObj), "runtime");
	m_hunkPool.Append(runtime);
	delete runtime;

	// Add imports from msvcrt
	HunkList* hunklist = new HunkList;
	ForEachExportInDLL("msvcrt", [&](const char* name) {
		// C++-mangled names ('?' prefix) are used as-is; C names get the
		// cdecl underscore prefix.
		string symbolName = name[0] == '?' ?
		name : string("_") + name;
		string importName = string("__imp_" + symbolName);
		hunklist->AddHunkBack(new Hunk(importName.c_str(), name, "msvcrt"));
		hunklist->AddHunkBack(MakeCallStub(symbolName.c_str()));
	});
	hunklist->MarkHunksAsLibrary();
	m_hunkPool.Append(hunklist);
	delete hunklist;
}

// Returns the entry symbol: the explicit -ENTRY name if given, otherwise the
// CRT startup symbol matching the selected subsystem.
std::string Crinkler::GetEntrySymbolName() const {
	if(m_entry.empty()) {
		switch(m_subsystem) {
			case SUBSYSTEM_CONSOLE:
				return "mainCRTStartup";
			case SUBSYSTEM_WINDOWS:
				return "WinMainCRTStartup";
		}
		return "";
	}
	return m_entry;
}

// Looks up the entry symbol in the hunk pool. Returns NULL (after logging an
// error) if it cannot be found; warns if the symbol is not at offset 0 of its
// hunk, since a jump will then be needed to reach it.
Symbol* Crinkler::FindEntryPoint() {
	// Place entry point in the beginning
	string entryName = GetEntrySymbolName();
	Symbol* entry = m_hunkPool.FindUndecoratedSymbol(entryName.c_str());
	if(entry == NULL) {
		Log::Error("", "Cannot find entry point '%s'. See manual for details.", entryName.c_str());
		return NULL;
	}

	if(entry->value > 0) {
		Log::Warning("", "Entry point not at start of section, jump necessary");
	}

	return entry;
}

// Garbage-collects hunks not reachable from the entry hunk, while keeping
// exported symbols (and a few import helpers, below) as extra roots.
void Crinkler::RemoveUnreferencedHunks(Hunk* base) {
	// Check dependencies and remove unused hunks
	vector<Hunk*> startHunks;
	startHunks.push_back(base);

	// Keep hold of exported symbols
	for (const Export& e : m_exports) {
		if (e.HasValue()) {
			// Value export: the name must NOT already exist as a real symbol
			// (unless it came from a library).
			Symbol* sym = m_hunkPool.FindSymbol(e.GetName().c_str());
			if (sym && !sym->fromLibrary) {
				Log::Error("", "Cannot create integer symbol '%s' for export: symbol already exists.", e.GetName().c_str());
			}
		} else {
			Symbol* sym = m_hunkPool.FindSymbol(e.GetSymbol().c_str());
			if (sym) {
				// Exported data must be initialized; promote BSS-like hunks to
				// the data section so the export has bytes to point at.
				if (sym->hunk->GetRawSize() == 0) {
					sym->hunk->SetRawSize(sym->hunk->GetVirtualSize());
					Log::Warning("", "Uninitialized hunk '%s' forced to data section because of exported symbol '%s'.", sym->hunk->GetName(), e.GetSymbol().c_str());
				}
				startHunks.push_back(sym->hunk);
			} else {
				Log::Error("", "Cannot find symbol '%s' to be exported under name '%s'.", e.GetSymbol().c_str(), e.GetName().c_str());
			}
		}
	}

	// Hack to ensure that LoadLibrary & MessageBox is there to be used in the import code
	Symbol* loadLibrary =
	m_hunkPool.FindSymbol("__imp__LoadLibraryA@4");
	Symbol* messageBox = m_hunkPool.FindSymbol("__imp__MessageBoxA@16");
	Symbol* dynamicInitializers = m_hunkPool.FindSymbol("__DynamicInitializers");
	if(loadLibrary != NULL)
		startHunks.push_back(loadLibrary->hunk);
	// MessageBox is only needed by the safe import code's error popup, and the
	// tiny import variant never uses it.
	if(m_useSafeImporting && !m_useTinyImport && messageBox != NULL)
		startHunks.push_back(messageBox->hunk);
	if(dynamicInitializers != NULL)
		startHunks.push_back(dynamicInitializers->hunk);

	m_hunkPool.RemoveUnreferencedHunks(startHunks);
}

// Loads the embedded import-resolver object matching the selected options:
// 1k mode has a single variant; otherwise safe/unsafe x fallback x range
// import select among the precompiled stubs. DLL fallback requires safe
// importing and is rejected otherwise.
void Crinkler::LoadImportCode(bool use1kMode, bool useSafeImporting, bool useDllFallback, bool useRangeImport) {
	// Do imports
	if (use1kMode){
		Load(import1KObj, int(import1KObj_end - import1KObj), "Crinkler import");
	} else {
		if (useSafeImporting)
			if (useDllFallback)
				if (useRangeImport)
					Load(importSafeFallbackRangeObj, int(importSafeFallbackRangeObj_end - importSafeFallbackRangeObj), "Crinkler import");
				else
					Load(importSafeFallbackObj, int(importSafeFallbackObj_end - importSafeFallbackObj), "Crinkler import");
			else
				if (useRangeImport)
					Load(importSafeRangeObj, int(importSafeRangeObj_end - importSafeRangeObj), "Crinkler import");
				else
					Load(importSafeObj, int(importSafeObj_end - importSafeObj), "Crinkler import");
		else
			if (useDllFallback)
				Log::Error("", "DLL fallback cannot be used with unsafe importing");
			else
				if (useRangeImport)
					Load(importRangeObj, int(importRangeObj_end - importRangeObj), "Crinkler import");
				else
					Load(importObj, int(importObj_end - importObj), "Crinkler import");
	}
}

// Builds the "models" hunk consumed by the depacker header: 16 bytes of
// header fields plus one mask byte per model of the two model lists
// (code models up to splittingPoint, data models up to rawsize).
Hunk* Crinkler::CreateModelHunk(int splittingPoint, int rawsize) {
	Hunk* models;
	int modelsSize = 16 + m_modellist1.nmodels + m_modellist2.nmodels;
	unsigned char masks1[256];
	unsigned char masks2[256];
	unsigned int w1 = m_modellist1.GetMaskList(masks1, false);
	unsigned int w2 = m_modellist2.GetMaskList(masks2, true);
	models = new Hunk("models", 0, 0, 0, modelsSize, modelsSize);
	models->AddSymbol(new Symbol("_Models", 0, SYMBOL_IS_RELOCATEABLE, models));
	char* ptr = models->GetPtr();
	// Segment end offsets are stored negated relative to CRINKLER_CODEBASE.
	*(unsigned
int*)ptr = -(CRINKLER_CODEBASE+splittingPoint); ptr += sizeof(unsigned int); *(unsigned int*)ptr = w1; ptr += sizeof(unsigned int); for(int m = 0; m < m_modellist1.nmodels; m++) *ptr++ = masks1[m]; *(unsigned int*)ptr = -(CRINKLER_CODEBASE+rawsize); ptr += sizeof(unsigned int); *(unsigned int*)ptr = w2; ptr += sizeof(unsigned int); for(int m = 0; m < m_modellist2.nmodels; m++) *ptr++ = masks2[m]; return models; } int Crinkler::OptimizeHashsize(unsigned char* data, int datasize, int hashsize, int splittingPoint, int tries) { if(tries == 0) return hashsize; int maxsize = datasize*2+1000; int bestsize = INT_MAX; int best_hashsize = hashsize; m_progressBar.BeginTask("Optimizing hash table size"); unsigned char context[MAX_CONTEXT_LENGTH] = {}; HashBits hashbits[2]; hashbits[0] = ComputeHashBits(data, splittingPoint, context, m_modellist1, true, false); hashbits[1] = ComputeHashBits(data + splittingPoint, datasize - splittingPoint, context, m_modellist2, false, true); uint32_t* hashsizes = new uint32_t[tries]; for (int i = 0; i < tries; i++) { hashsize = PreviousPrime(hashsize / 2) * 2; hashsizes[i] = hashsize; } int* sizes = new int[tries]; int progress = 0; concurrency::combinable<vector<unsigned char>> buffers([maxsize]() { return vector<unsigned char>(maxsize, 0); }); concurrency::combinable<vector<TinyHashEntry>> hashtable1([&hashbits]() { return vector<TinyHashEntry>(hashbits[0].tinyhashsize); }); concurrency::combinable<vector<TinyHashEntry>> hashtable2([&hashbits]() { return vector<TinyHashEntry>(hashbits[1].tinyhashsize); }); concurrency::critical_section cs; concurrency::parallel_for(0, tries, [&](int i) { TinyHashEntry* hashtables[] = { hashtable1.local().data(), hashtable2.local().data() }; sizes[i] = CompressFromHashBits4k(hashbits, hashtables, 2, buffers.local().data(), maxsize, m_saturate != 0, CRINKLER_BASEPROB, hashsizes[i], nullptr); Concurrency::critical_section::scoped_lock l(cs); m_progressBar.Update(++progress, m_hashtries); }); for (int i = 0; i < 
tries; i++) { if (sizes[i] <= bestsize) { bestsize = sizes[i]; best_hashsize = hashsizes[i]; } } delete[] sizes; delete[] hashsizes; m_progressBar.EndTask(); return best_hashsize; } int Crinkler::EstimateModels(unsigned char* data, int datasize, int splittingPoint, bool reestimate, bool use1kMode, int target_size1, int target_size2) { bool verbose = (m_printFlags & PRINT_MODELS) != 0; if (use1kMode) { m_progressBar.BeginTask(reestimate ? "Reestimating models" : "Estimating models"); int size = target_size1; int new_size; ModelList1k new_modellist1k = ApproximateModels1k(data, datasize, &new_size, ProgressUpdateCallback, &m_progressBar); if(new_size < size) { size = new_size; m_modellist1k = new_modellist1k; } m_progressBar.EndTask(); printf("\nEstimated compressed size: %.2f\n", size / (float)(BIT_PRECISION * 8)); if(verbose) m_modellist1k.Print(); return new_size; } else { unsigned char contexts[2][MAX_CONTEXT_LENGTH] = {}; for (int i = 0; i < MAX_CONTEXT_LENGTH; i++) { int srcpos = splittingPoint - MAX_CONTEXT_LENGTH + i; contexts[1][i] = srcpos >= 0 ? data[srcpos] : 0; } int size1 = target_size1; int size2 = target_size2; ModelList4k modellist1, modellist2; int new_size1, new_size2; m_progressBar.BeginTask(reestimate ? "Reestimating models for code" : "Estimating models for code"); modellist1 = ApproximateModels4k(data, splittingPoint, contexts[0], m_compressionType, m_saturate != 0, CRINKLER_BASEPROB, &new_size1, ProgressUpdateCallback, &m_progressBar); m_progressBar.EndTask(); if(new_size1 < size1) { size1 = new_size1; m_modellist1 = modellist1; } if (verbose) { printf("Models: "); m_modellist1.Print(stdout); } printf("Estimated compressed size of code: %.2f\n", size1 / (float)(BIT_PRECISION * 8)); m_progressBar.BeginTask(reestimate ? 
"Reestimating models for data" : "Estimating models for data"); modellist2 = ApproximateModels4k(data + splittingPoint, datasize - splittingPoint, contexts[1], m_compressionType, m_saturate != 0, CRINKLER_BASEPROB, &new_size2, ProgressUpdateCallback, &m_progressBar); m_progressBar.EndTask(); if(new_size2 < size2) { size2 = new_size2; m_modellist2 = modellist2; } if (verbose) { printf("Models: "); m_modellist2.Print(stdout); } printf("Estimated compressed size of data: %.2f\n", size2 / (float)(BIT_PRECISION * 8)); ModelList4k* modelLists[] = {&m_modellist1, &m_modellist2}; int segmentSizes[] = { splittingPoint, datasize - splittingPoint }; int compressedSizes[2] = {}; int idealsize = EvaluateSize4k(data, 2, segmentSizes, compressedSizes, modelLists, CRINKLER_BASEPROB, m_saturate != 0); printf("\nIdeal compressed size of code: %.2f\n", compressedSizes[0] / (float)(BIT_PRECISION * 8)); printf("Ideal compressed size of data: %.2f\n", compressedSizes[1] / (float)(BIT_PRECISION * 8)); printf("Ideal compressed total size: %.2f\n", idealsize / (float)(BIT_PRECISION * 8)); return idealsize; } } void Crinkler::SetHeaderSaturation(Hunk* header) { if (m_saturate) { static const unsigned char saturateCode[] = { 0x75, 0x03, 0xFE, 0x0C, 0x1F }; header->Insert(header->FindSymbol("_SaturatePtr")->value, saturateCode, sizeof(saturateCode)); *(header->GetPtr() + header->FindSymbol("_SaturateAdjust1Ptr")->value) += sizeof(saturateCode); *(header->GetPtr() + header->FindSymbol("_SaturateAdjust2Ptr")->value) -= sizeof(saturateCode); } } void Crinkler::SetHeaderConstants(Hunk* header, Hunk* phase1, int hashsize, int boostfactor, int baseprob0, int baseprob1, unsigned int modelmask, int subsystem_version, int exports_rva, bool use1kHeader) { header->AddSymbol(new Symbol("_HashTableSize", hashsize/2, 0, header)); header->AddSymbol(new Symbol("_UnpackedData", CRINKLER_CODEBASE, 0, header)); header->AddSymbol(new Symbol("_ImageBase", CRINKLER_IMAGEBASE, 0, header)); header->AddSymbol(new 
Symbol("_ModelMask", modelmask, 0, header)); if (use1kHeader) { int virtualSizeHighByteOffset = header->FindSymbol("_VirtualSizeHighBytePtr")->value; int lowBytes = *(int*)(header->GetPtr() + virtualSizeHighByteOffset - 3) & 0xFFFFFF; int virtualSize = phase1->GetVirtualSize() + 65536 * 2; *(header->GetPtr() + header->FindSymbol("_BaseProbPtr0")->value) = baseprob0; *(header->GetPtr() + header->FindSymbol("_BaseProbPtr1")->value) = baseprob1; *(header->GetPtr() + header->FindSymbol("_BoostFactorPtr")->value) = boostfactor; *(unsigned short*)(header->GetPtr() + header->FindSymbol("_DepackEndPositionPtr")->value) = phase1->GetRawSize() + CRINKLER_CODEBASE; *(header->GetPtr() + virtualSizeHighByteOffset) = (virtualSize - lowBytes + 0xFFFFFF) >> 24; } else { int virtualSize = Align(max(phase1->GetVirtualSize(), phase1->GetRawSize() + hashsize), 16); header->AddSymbol(new Symbol("_VirtualSize", virtualSize, 0, header)); *(header->GetPtr() + header->FindSymbol("_BaseProbPtr")->value) = CRINKLER_BASEPROB; *(header->GetPtr() + header->FindSymbol("_ModelSkipPtr")->value) = m_modellist1.nmodels + 8; if (exports_rva) { *(int*)(header->GetPtr() + header->FindSymbol("_ExportTableRVAPtr")->value) = exports_rva; *(int*)(header->GetPtr() + header->FindSymbol("_NumberOfDataDirectoriesPtr")->value) = 1; } } *(header->GetPtr() + header->FindSymbol("_SubsystemTypePtr")->value) = subsystem_version; *((short*)(header->GetPtr() + header->FindSymbol("_LinkerVersionPtr")->value)) = CRINKLER_LINKER_VERSION; if (phase1->GetRawSize() >= 2 && (phase1->GetPtr()[0] == 0x5F || phase1->GetPtr()[2] == 0x5F)) { // Code starts with POP EDI => call transform *(header->GetPtr() + header->FindSymbol("_SpareNopPtr")->value) = 0x57; // PUSH EDI } if (m_largeAddressAware) { *((short*)(header->GetPtr() + header->FindSymbol("_CharacteristicsPtr")->value)) |= 0x0020; } } void Crinkler::Recompress(const char* input_filename, const char* output_filename) { MemoryFile file(input_filename); unsigned char* indata 
= (unsigned char*)file.GetPtr(); FILE* outfile = 0; if (strcmp(input_filename, output_filename) != 0) { // Open output file now, just to be sure if(fopen_s(&outfile, output_filename, "wb")) { Log::Error("", "Cannot open '%s' for writing", output_filename); return; } } int length = file.GetSize(); if(length < 200) { NotCrinklerFileError(); } unsigned int pe_header_offset = *(unsigned int*)&indata[0x3C]; bool is_compatibility_header = false; bool is_tiny_header = false; char majorlv = 0, minorlv = 0; if(pe_header_offset == 4) { is_compatibility_header = false; majorlv = indata[2]; minorlv = indata[3]; if(majorlv >= '2' && indata[0xC] == 0x0F && indata[0xD] == 0xA3 && indata[0xE] == 0x2D) { is_tiny_header = true; } } else if(pe_header_offset == 12) { is_compatibility_header = true; majorlv = indata[38]; minorlv = indata[39]; } else { NotCrinklerFileError(); } if (majorlv < '0' || majorlv > '9' || minorlv < '0' || minorlv > '9') { NotCrinklerFileError(); } // Oops: 0.6 -> 1.0 if (majorlv == '0' && minorlv == '6') { majorlv = '1'; minorlv = '0'; } int version = (majorlv-'0')*10 + (minorlv-'0'); if (is_compatibility_header && version >= 14) { printf("File compressed using a pre-1.4 Crinkler and recompressed using Crinkler version %c.%c\n", majorlv, minorlv); } else { printf("File compressed or recompressed using Crinkler version %c.%c\n", majorlv, minorlv); } switch(majorlv) { case '0': switch(minorlv) { case '1': case '2': case '3': Log::Error("", "Only files compressed using Crinkler 0.4 or newer can be recompressed.\n"); return; break; case '4': case '5': FixHeader04((char*)indata); break; } break; case '1': switch(minorlv) { case '0': FixHeader10((char*)indata); break; } break; } int virtualSize = (*(int*)&indata[pe_header_offset+0x50]) - 0x20000; int hashtable_size = -1; int return_offset = -1; int models_address = -1; int depacker_start = -1; int rawsize_start = -1; int compressed_data_rva = -1; for(int i = 0; i < 0x200; i++) { if(is_tiny_header) { if(indata[i] == 
0x7C && indata[i + 2] == 0xC3 && return_offset == -1) { return_offset = i + 2; indata[return_offset] = 0xCC; } if(indata[i] == 0x66 && indata[i + 1] == 0x81 && indata[i + 2] == 0xff) { rawsize_start = i + 3; } if(version <= 21) { if(indata[i] == 0xB9 && indata[i + 1] == 0x00 && indata[i + 2] == 0x00 && indata[i + 3] == 0x00 && indata[i + 4] == 0x00 && indata[i + 5] == 0x59 && indata[i + 6] == 0x6a) { m_modellist1k.baseprob0 = indata[i + 7]; m_modellist1k.baseprob1 = indata[i + 9]; m_modellist1k.modelmask = *(unsigned int*)&indata[i + 11]; } } else { if(indata[i] == 0x6a && indata[i + 2] == 0x3d && indata[i + 3] == 0x00 && indata[i + 4] == 0x00 && indata[i + 5] == 0x00 && indata[i + 6] == 0x00 && indata[i + 7] == 0x6a ) { m_modellist1k.baseprob0 = indata[i + 1]; m_modellist1k.baseprob1 = indata[i + 8]; m_modellist1k.modelmask = *(unsigned int*)&indata[i + 10]; } } if(indata[i] == 0x7F && indata[i + 2] == 0xB1 && indata[i + 4] == 0x89 && indata[i + 5] == 0xE6) { m_modellist1k.boost = indata[i + 3]; } if(indata[i] == 0x0F && indata[i + 1] == 0xA3 && indata[i + 2] == 0x2D && compressed_data_rva == -1) { compressed_data_rva = *(int*)&indata[i + 3]; } } else { if(indata[i] == 0xbf && indata[i + 5] == 0xb9 && hashtable_size == -1) { hashtable_size = (*(int*)&indata[i + 6]) * 2; } if(indata[i] == 0x5A && indata[i + 1] == 0x7B && indata[i + 3] == 0xC3 && return_offset == -1) { return_offset = i + 3; indata[return_offset] = 0xCC; } else if(indata[i] == 0x8D && indata[i + 3] == 0x7B && indata[i + 5] == 0xC3 && return_offset == -1) { return_offset = i + 5; indata[return_offset] = 0xCC; } if(version < 13) { if(indata[i] == 0x4B && indata[i + 1] == 0x61 && indata[i + 2] == 0x7F) { depacker_start = i; } } else if(version == 13) { if(indata[i] == 0x0F && indata[i + 1] == 0xA3 && indata[i + 2] == 0x2D) { depacker_start = i; } } else { if(indata[i] == 0xE8 && indata[i + 5] == 0x60 && indata[i + 6] == 0xAD) { depacker_start = i; } } if(indata[i] == 0xBE && indata[i + 3] == 0x40 && 
indata[i + 4] == 0x00) { models_address = *(int*)&indata[i + 1]; } } } int models_offset = -1; int rawsize = 0; int splittingPoint = 0; if(is_tiny_header) { if(return_offset == -1 && compressed_data_rva != -1) { NotCrinklerFileError(); } rawsize = *(unsigned short*)&indata[rawsize_start]; splittingPoint = rawsize; } else { if(hashtable_size == -1 || return_offset == -1 || (depacker_start == -1 && is_compatibility_header) || models_address == -1) { NotCrinklerFileError(); } models_offset = models_address - CRINKLER_IMAGEBASE; unsigned int weightmask1 = *(unsigned int*)&indata[models_offset + 4]; unsigned char* models1 = &indata[models_offset + 8]; m_modellist1.SetFromModelsAndMask(models1, weightmask1); int modelskip = 8 + m_modellist1.nmodels; unsigned int weightmask2 = *(unsigned int*)&indata[models_offset + modelskip + 4]; unsigned char* models2 = &indata[models_offset + modelskip + 8]; m_modellist2.SetFromModelsAndMask(models2, weightmask2); if(version >= 13) { rawsize = -(*(int*)&indata[models_offset + modelskip]) - CRINKLER_CODEBASE; splittingPoint = -(*(int*)&indata[models_offset]) - CRINKLER_CODEBASE; } else { rawsize = (*(int*)&indata[models_offset + modelskip]) / 8; splittingPoint = (*(int*)&indata[models_offset]) / 8; } } SetUseTinyHeader(is_tiny_header); CompressionType compmode = m_modellist1.DetectCompressionType(); int subsystem_version = indata[pe_header_offset+0x5C]; int large_address_aware = (*(unsigned short *)&indata[pe_header_offset+0x16] & 0x0020) != 0; static const unsigned char saturateCode[] = { 0x75, 0x03, 0xFE, 0x0C, 0x1F }; bool saturate = std::search(indata, indata + length, std::begin(saturateCode), std::end(saturateCode)) != indata + length; if (m_saturate == -1) m_saturate = saturate; int exports_rva = 0; if(!is_tiny_header && majorlv >= '2') { exports_rva = *(int*)&indata[pe_header_offset + 0x78]; } printf("Original file size: %d\n", length); printf("Original Tiny Header: %s\n", is_tiny_header ? 
"YES" : "NO"); printf("Original Virtual size: %d\n", virtualSize); printf("Original Subsystem type: %s\n", subsystem_version == 3 ? "CONSOLE" : "WINDOWS"); printf("Original Large address aware: %s\n", large_address_aware ? "YES" : "NO"); if(!is_tiny_header) { printf("Original Compression mode: %s\n", compmode == COMPRESSION_INSTANT ? "INSTANT" : version < 21 ? "FAST/SLOW" : "FAST/SLOW/VERYSLOW"); printf("Original Saturate counters: %s\n", saturate ? "YES" : "NO"); printf("Original Hash size: %d\n", hashtable_size); } if(is_tiny_header) { printf("Total size: %d\n", rawsize); printf("\n"); } else { printf("Code size: %d\n", splittingPoint); printf("Data size: %d\n", rawsize - splittingPoint); printf("\n"); } STARTUPINFO startupInfo = {0}; startupInfo.cb = sizeof(startupInfo); char tempPath[MAX_PATH]; GetTempPath(MAX_PATH, tempPath); char tempFilename[MAX_PATH]; GetTempFileName(tempPath, "", 0, tempFilename); PROCESS_INFORMATION pi; if(!file.Write(tempFilename)) { Log::Error("", "Failed to write to temporary file '%s'\n", tempFilename); } CreateProcess(tempFilename, NULL, NULL, NULL, false, NORMAL_PRIORITY_CLASS|CREATE_SUSPENDED, NULL, NULL, &startupInfo, &pi); DebugActiveProcess(pi.dwProcessId); ResumeThread(pi.hThread); bool done = false; do { DEBUG_EVENT de; if(WaitForDebugEvent(&de, 120000) == 0) { Log::Error("", "Program was been unresponsive for more than 120 seconds - closing down\n"); } if(de.dwDebugEventCode == EXCEPTION_DEBUG_EVENT && (de.u.Exception.ExceptionRecord.ExceptionAddress == (PVOID)(size_t)(0x410000+return_offset) || de.u.Exception.ExceptionRecord.ExceptionAddress == (PVOID)(size_t)(0x400000+return_offset))) { done = true; } if(!done) ContinueDebugEvent(de.dwProcessId, de.dwThreadId, DBG_CONTINUE); } while(!done); unsigned char* rawdata = new unsigned char[rawsize]; SIZE_T read; if(ReadProcessMemory(pi.hProcess, (LPCVOID)0x420000, rawdata, rawsize, &read) == 0 || read != rawsize) { Log::Error("", "Failed to read process memory\n"); } 
CloseHandle(pi.hProcess); CloseHandle(pi.hThread); // Patch calltrans code int import_offset = 0; if (rawdata[0] == 0x89 && rawdata[1] == 0xD7) { // MOV EDI, EDX // Old calltrans code - convert to new unsigned int ncalls = rawdata[5]; rawdata[0] = 0x5F; // POP EDI rawdata[1] = 0xB9; // MOV ECX, DWORD *((unsigned int *)&rawdata[2]) = ncalls; printf("Call transformation code successfully patched.\n"); import_offset = 24; } else if (rawdata[0] == 0x5F) { // POP EDI // New calltrans code printf("Call transformation code does not need patching.\n"); import_offset = 24; } // Patch import code static const unsigned char old_import_code[] = {0x31, 0xC0, 0x64, 0x8B, 0x40, 0x30, 0x8B, 0x40, 0x0C, 0x8B, 0x40, 0x1C, 0x8B, 0x40, 0x00, 0x8B, 0x68, 0x08}; static const unsigned char new_import_code[] = {0x64, 0x67, 0x8B, 0x47, 0x30, 0x8B, 0x40, 0x0C, 0x8B, 0x40, 0x0C, 0x8B, 0x00, 0x8B, 0x00, 0x8B, 0x68, 0x18}; static const unsigned char new_import_code2[] ={0x58, 0x8B, 0x40, 0x0C, 0x8B, 0x40, 0x0C, 0x8B, 0x00, 0x8B, 0x00, 0x8B, 0x68, 0x18}; static const unsigned char tiny_import_code[] ={0x58, 0x8B, 0x40, 0x0C, 0x8B, 0x40, 0x0C, 0x8B, 0x40, 0x00, 0x8B, 0x40, 0x00, 0x8B, 0x40, 0x18 }; bool found_import = false; int hashes_address = -1; int hashes_address_offset = -1; int dll_names_address = -1; bool is_tiny_import = false; for (int i = import_offset ; i < splittingPoint-(int)sizeof(old_import_code) ; i++) { if (rawdata[i] == 0xBB) { hashes_address_offset = i + 1; hashes_address = *(int*)&rawdata[hashes_address_offset]; } if (rawdata[i] == 0xBE) { dll_names_address = *(int*)&rawdata[i + 1]; } if(rawdata[i] == 0xBF) { dll_names_address = *(int*)&rawdata[i + 1]; } if (memcmp(rawdata+i, old_import_code, sizeof(old_import_code)) == 0) { // No calltrans memcpy(rawdata+i, new_import_code, sizeof(new_import_code)); printf("Import code successfully patched.\n"); found_import = true; break; } if (memcmp(rawdata+i, new_import_code, sizeof(new_import_code)) == 0 || memcmp(rawdata+i, 
new_import_code2, sizeof(new_import_code2)) == 0) { printf("Import code does not need patching.\n"); found_import = true; break; } if(memcmp(rawdata + i, tiny_import_code, sizeof(tiny_import_code)) == 0) { printf("Import code does not need patching.\n"); found_import = true; is_tiny_import = true; break; } } if(!found_import || dll_names_address == -1) { Log::Error("", "Cannot find old import code to patch\n"); } // Make the 1k report a little more readable if(is_tiny_header && dll_names_address - CRINKLER_CODEBASE < splittingPoint) { splittingPoint = dll_names_address - CRINKLER_CODEBASE; } SetUseTinyImport(is_tiny_import); printf("\n"); if (!m_replaceDlls.empty()) { if(is_tiny_header) { char* start_ptr = (char*)&rawdata[dll_names_address - CRINKLER_CODEBASE]; char* end_ptr = (char*)&rawdata[rawsize]; for(const auto& kv : m_replaceDlls) { char* pos = std::search(start_ptr, end_ptr, kv.first.begin(), kv.first.end()); if(pos != end_ptr) { strcpy(pos, kv.second.c_str()); } } } else { char* name = (char*)&rawdata[dll_names_address + 1 - CRINKLER_CODEBASE]; while(name[0] != (char)0xFF) { if(m_replaceDlls.count(name)) { assert(m_replaceDlls[name].length() == strlen(name)); strcpy(name, m_replaceDlls[name].c_str()); } name += strlen(name) + 2; } } } HunkList* headerHunks = NULL; if(is_tiny_header) { headerHunks = m_hunkLoader.Load(header1KObj, int(header1KObj_end - header1KObj), "crinkler header"); } else { if(is_compatibility_header) { headerHunks = m_hunkLoader.Load(headerCompatibilityObj, int(headerCompatibilityObj_end - headerCompatibilityObj), "crinkler header"); } else { headerHunks = m_hunkLoader.Load(headerObj, int(headerObj_end - headerObj), "crinkler header"); } } Hunk* header = headerHunks->FindSymbol("_header")->hunk; Hunk* depacker = nullptr; if (is_compatibility_header) { depacker = headerHunks->FindSymbol("_DepackEntry")->hunk; SetHeaderSaturation(depacker); } if(!is_tiny_import) { int new_hashes_address = is_compatibility_header ? 
CRINKLER_IMAGEBASE : CRINKLER_IMAGEBASE + header->GetRawSize(); *(int*)&rawdata[hashes_address_offset] = new_hashes_address; } Hunk* phase1 = new Hunk("linked", (char*)rawdata, HUNK_IS_CODE|HUNK_IS_WRITEABLE, 0, rawsize, virtualSize); delete[] rawdata; if(!is_tiny_header) { // Handle exports std::set<Export> exports; printf("Original Exports:"); if(exports_rva) { exports = StripExports(phase1, exports_rva); printf("\n"); PrintExports(exports); if(!m_stripExports) { for(const Export& e : exports) { AddExport(e); } } } else { printf(" NONE\n"); } printf("Resulting Exports:"); if(!m_exports.empty()) { printf("\n"); PrintExports(m_exports); for(const Export& e : m_exports) { if(!e.HasValue()) { Symbol *sym = phase1->FindSymbol(e.GetSymbol().c_str()); if(!sym) { Log::Error("", "Cannot find symbol '%s' to be exported under name '%s'.", e.GetSymbol().c_str(), e.GetName().c_str()); } } } int padding = exports_rva ? 0 : 16; phase1->SetVirtualSize(phase1->GetRawSize() + padding); Hunk* export_hunk = CreateExportTable(m_exports); HunkList hl; hl.AddHunkBack(phase1); hl.AddHunkBack(export_hunk); Hunk* with_exports = hl.ToHunk("linked", CRINKLER_CODEBASE); hl.Clear(); with_exports->SetVirtualSize(virtualSize); with_exports->Relocate(CRINKLER_CODEBASE); delete phase1; phase1 = with_exports; } else { printf(" NONE\n"); } } phase1->Trim(); printf("\nRecompressing...\n"); int maxsize = phase1->GetRawSize()*2+1000; int* sizefill = new int[maxsize]; unsigned char* data = new unsigned char[maxsize]; int best_hashsize = 0; int size; if(is_tiny_header) { size = Compress1k((unsigned char*)phase1->GetPtr(), phase1->GetRawSize(), data, maxsize, m_modellist1k, sizefill, nullptr); printf("Real compressed total size: %d\n", size); } else { int idealsize = 0; if(m_compressionType < 0) { // Keep models if(m_hashsize < 0) { // Use original optimized hash size SetHashsize((hashtable_size - 1) / (1024 * 1024) + 1); best_hashsize = hashtable_size; SetHashtries(0); } else { best_hashsize = 
PreviousPrime(m_hashsize / 2) * 2; InitProgressBar(); // Rehash best_hashsize = OptimizeHashsize((unsigned char*)phase1->GetPtr(), phase1->GetRawSize(), best_hashsize, splittingPoint, m_hashtries); DeinitProgressBar(); } } else { if(m_hashsize < 0) { SetHashsize((hashtable_size - 1) / (1024 * 1024) + 1); } best_hashsize = PreviousPrime(m_hashsize / 2) * 2; if(m_compressionType != COMPRESSION_INSTANT) { InitProgressBar(); idealsize = EstimateModels((unsigned char*)phase1->GetPtr(), phase1->GetRawSize(), splittingPoint, false, false, INT_MAX, INT_MAX); // Hashing best_hashsize = OptimizeHashsize((unsigned char*)phase1->GetPtr(), phase1->GetRawSize(), best_hashsize, splittingPoint, m_hashtries); DeinitProgressBar(); } } ModelList4k* modelLists[] = { &m_modellist1, &m_modellist2 }; int segmentSizes[] = { splittingPoint, phase1->GetRawSize() - splittingPoint }; size = Compress4k((unsigned char*)phase1->GetPtr(), 2, segmentSizes, data, maxsize, modelLists, m_saturate != 0, CRINKLER_BASEPROB, best_hashsize, sizefill); if(m_compressionType != -1 && m_compressionType != COMPRESSION_INSTANT) { int sizeIncludingModels = size + m_modellist1.nmodels + m_modellist2.nmodels; float byteslost = sizeIncludingModels - idealsize / (float)(BIT_PRECISION * 8); printf("Real compressed total size: %d\nBytes lost to hashing: %.2f\n", sizeIncludingModels, byteslost); } SetCompressionType(compmode); } if(is_compatibility_header) { // Copy hashes from old header DWORD* new_header_ptr = (DWORD*)header->GetPtr(); DWORD* old_header_ptr = (DWORD*)indata; for(int i = 0; i < depacker_start / 4; i++) { if(new_header_ptr[i] == 'HSAH') new_header_ptr[i] = old_header_ptr[i]; } header->SetRawSize(depacker_start); header->SetVirtualSize(depacker_start); } Hunk *hashHunk = nullptr; if (!is_compatibility_header && !is_tiny_import) { // Create hunk with hashes int hashes_offset = hashes_address - CRINKLER_IMAGEBASE; int hashes_bytes = is_tiny_header ? 
(compressed_data_rva - CRINKLER_IMAGEBASE - hashes_offset) : (models_offset - hashes_offset); hashHunk = new Hunk("HashHunk", (char*)&indata[hashes_offset], 0, 0, hashes_bytes, hashes_bytes); } if (m_subsystem >= 0) { subsystem_version = (m_subsystem == SUBSYSTEM_WINDOWS) ? IMAGE_SUBSYSTEM_WINDOWS_GUI : IMAGE_SUBSYSTEM_WINDOWS_CUI; } if (m_largeAddressAware == -1) { m_largeAddressAware = large_address_aware; } SetSubsystem((subsystem_version == IMAGE_SUBSYSTEM_WINDOWS_GUI) ? SUBSYSTEM_WINDOWS : SUBSYSTEM_CONSOLE); Hunk *phase2 = FinalLink(header, depacker, hashHunk, phase1, data, size, splittingPoint, best_hashsize); delete[] data; CompressionReportRecord* csr = phase1->GetCompressionSummary(sizefill, splittingPoint); if(m_printFlags & PRINT_LABELS) VerboseLabels(csr); if(!m_summaryFilename.empty()) HtmlReport(csr, m_summaryFilename.c_str(), *phase1, *phase1, sizefill, output_filename, phase2->GetRawSize(), this); delete csr; delete[] sizefill; if (!outfile) { if(fopen_s(&outfile, output_filename, "wb")) { Log::Error("", "Cannot open '%s' for writing", output_filename); return; } } fwrite(phase2->GetPtr(), 1, phase2->GetRawSize(), outfile); fclose(outfile); printf("\nOutput file: %s\n", output_filename); printf("Final file size: %d\n\n", phase2->GetRawSize()); delete phase1; delete phase2; } Hunk* Crinkler::CreateDynamicInitializerHunk() { const int num_hunks = m_hunkPool.GetNumHunks(); std::vector<Symbol*> symbols; for(int i = 0; i < num_hunks; i++) { Hunk* hunk = m_hunkPool[i]; if(EndsWith(hunk->GetName(), "CRT$XCU")) { int num_relocations = hunk->GetNumRelocations(); Relocation* relocations = hunk->GetRelocations(); for(int i = 0; i < num_relocations; i++) { symbols.push_back(m_hunkPool.FindSymbol(relocations[i].symbolname.c_str())); } } } if(!symbols.empty()) { const int num_symbols = (int)symbols.size(); const int hunk_size = num_symbols*5; Hunk* hunk = new Hunk("dynamic initializer calls", NULL, HUNK_IS_CODE, 0, hunk_size, hunk_size); char* ptr = 
hunk->GetPtr(); for(int i = 0; i < num_symbols; i++) { *ptr++ = (char)0xE8; *ptr++ = 0x00; *ptr++ = 0x00; *ptr++ = 0x00; *ptr++ = 0x00; Relocation r; r.offset = i*5+1; r.symbolname = symbols[i]->name; r.type = RELOCTYPE_REL32; hunk->AddRelocation(r); } hunk->AddSymbol(new Symbol("__DynamicInitializers", 0, SYMBOL_IS_RELOCATEABLE, hunk)); printf("\nIncluded %d dynamic initializer%s.\n", num_symbols, num_symbols == 1 ? "" : "s"); return hunk; } return NULL; } void Crinkler::Link(const char* filename) { // Open output file immediate, just to be sure FILE* outfile; int old_filesize = 0; if (!fopen_s(&outfile, filename, "rb")) { // Find old size fseek(outfile, 0, SEEK_END); old_filesize = ftell(outfile); fclose(outfile); } if(fopen_s(&outfile, filename, "wb")) { Log::Error("", "Cannot open '%s' for writing", filename); return; } // Find entry hunk and move it to front Symbol* entry = FindEntryPoint(); if(entry == NULL) return; Hunk* dynamicInitializersHunk = NULL; if (m_runInitializers) { dynamicInitializersHunk = CreateDynamicInitializerHunk(); if(dynamicInitializersHunk) { m_hunkPool.AddHunkBack(dynamicInitializersHunk); } } // Color hunks from entry hunk RemoveUnreferencedHunks(entry->hunk); // Replace DLLs ReplaceDlls(m_hunkPool); if (m_overrideAlignments) OverrideAlignments(m_hunkPool); // 1-byte align entry point and other sections int n_unaligned = 0; bool entry_point_unaligned = false; if(entry->hunk->GetAlignmentBits() > 0) { entry->hunk->SetAlignmentBits(0); n_unaligned++; entry_point_unaligned = true; } if (m_unalignCode) { for (int i = 0; i < m_hunkPool.GetNumHunks(); i++) { Hunk* hunk = m_hunkPool[i]; if (hunk->GetFlags() & HUNK_IS_CODE && !(hunk->GetFlags() & HUNK_IS_ALIGNED) && hunk->GetAlignmentBits() > 0) { hunk->SetAlignmentBits(0); n_unaligned++; } } } if (n_unaligned > 0) { printf("Forced alignment of %d code hunk%s to 1", n_unaligned, n_unaligned > 1 ? 
"s" : ""); if (entry_point_unaligned) { printf(" (including entry point)"); } printf(".\n"); } // Load appropriate header HunkList* headerHunks = m_useTinyHeader ? m_hunkLoader.Load(header1KObj, int(header1KObj_end - header1KObj), "crinkler header") : m_hunkLoader.Load(headerObj, int(headerObj_end - headerObj), "crinkler header"); Hunk* header = headerHunks->FindSymbol("_header")->hunk; if(!m_useTinyHeader) SetHeaderSaturation(header); Hunk* hashHunk = NULL; int hash_bits; int max_dll_name_length; bool usesRangeImport=false; { // Add imports HunkList* importHunkList = m_useTinyImport ? ImportHandler::CreateImportHunks1K(&m_hunkPool, (m_printFlags & PRINT_IMPORTS) != 0, hash_bits, max_dll_name_length) : ImportHandler::CreateImportHunks(&m_hunkPool, hashHunk, m_fallbackDlls, m_rangeDlls, (m_printFlags & PRINT_IMPORTS) != 0, usesRangeImport); m_hunkPool.RemoveImportHunks(); m_hunkPool.Append(importHunkList); delete importHunkList; } LoadImportCode(m_useTinyImport, m_useSafeImporting, !m_fallbackDlls.empty(), usesRangeImport); Symbol* importSymbol = m_hunkPool.FindSymbol("_Import"); if(dynamicInitializersHunk) { m_hunkPool.RemoveHunk(dynamicInitializersHunk); m_hunkPool.AddHunkFront(dynamicInitializersHunk); dynamicInitializersHunk->SetContinuation(entry); } Hunk* importHunk = importSymbol->hunk; m_hunkPool.RemoveHunk(importHunk); m_hunkPool.AddHunkFront(importHunk); importHunk->SetAlignmentBits(0); importHunk->SetContinuation(dynamicInitializersHunk ? 
dynamicInitializersHunk->FindSymbol("__DynamicInitializers") : entry); // Make sure import and startup code has access to the _ImageBase address importHunk->AddSymbol(new Symbol("_ImageBase", CRINKLER_IMAGEBASE, 0, importHunk)); importHunk->AddSymbol(new Symbol("___ImageBase", CRINKLER_IMAGEBASE, 0, importHunk)); if(m_useTinyImport) { *(importHunk->GetPtr() + importHunk->FindSymbol("_HashShiftPtr")->value) = 32 - hash_bits; *(importHunk->GetPtr() + importHunk->FindSymbol("_MaxNameLengthPtr")->value) = max_dll_name_length; } // Truncate floats if(m_truncateFloats) { printf("\nTruncating floats:\n"); m_hunkPool.RoundFloats(m_truncateBits); } if (!m_exports.empty()) { m_hunkPool.AddHunkBack(CreateExportTable(m_exports)); } // Sort hunks heuristically HeuristicHunkSorter::SortHunkList(&m_hunkPool); int best_hashsize = PreviousPrime(m_hashsize / 2) * 2; Reuse *reuse = nullptr; int reuse_filesize = 0; ReuseType reuseType = m_useTinyHeader ? REUSE_OFF : m_reuseType; if (reuseType != REUSE_OFF && reuseType != REUSE_WRITE) { reuse = LoadReuseFile(m_reuseFilename.c_str()); if (reuse != nullptr) { m_modellist1 = *reuse->GetCodeModels(); m_modellist2 = *reuse->GetDataModels(); ExplicitHunkSorter::SortHunkList(&m_hunkPool, reuse); best_hashsize = reuse->GetHashSize(); printf("\nRead reuse file: %s\n", m_reuseFilename.c_str()); } } // Create phase 1 data hunk int splittingPoint; Hunk* phase1, *phase1Untransformed; m_hunkPool[0]->AddSymbol(new Symbol("_HeaderHashes", CRINKLER_IMAGEBASE+header->GetRawSize(), SYMBOL_IS_SECTION, m_hunkPool[0])); if (!m_transform->LinkAndTransform(&m_hunkPool, importSymbol, CRINKLER_CODEBASE, phase1, &phase1Untransformed, &splittingPoint, true)) { // Transform failed, run again delete phase1; delete phase1Untransformed; m_transform->LinkAndTransform(&m_hunkPool, importSymbol, CRINKLER_CODEBASE, phase1, &phase1Untransformed, &splittingPoint, false); } int maxsize = phase1->GetRawSize()*2+1000; // Allocate plenty of memory unsigned char* data = new 
unsigned char[maxsize]; if (reuseType == REUSE_IMPROVE && reuse != nullptr) { ModelList4k* modelLists[] = { &m_modellist1, &m_modellist2 }; int segmentSizes[] = { splittingPoint, phase1->GetRawSize()- splittingPoint }; int size = Compress4k((unsigned char*)phase1->GetPtr(), 2, segmentSizes, data, maxsize, modelLists, m_saturate != 0, CRINKLER_BASEPROB, best_hashsize, nullptr); Hunk *phase2 = FinalLink(header, nullptr, hashHunk, phase1, data, size, splittingPoint, best_hashsize); reuse_filesize = phase2->GetRawSize(); delete phase2; printf("\nFile size with reuse parameters: %d\n", reuse_filesize); } printf("\nUncompressed size of code: %5d\n", splittingPoint); printf("Uncompressed size of data: %5d\n", phase1->GetRawSize() - splittingPoint); int* sizefill = new int[maxsize]; int size, idealsize = 0; if (m_useTinyHeader || m_compressionType != COMPRESSION_INSTANT) { if (reuseType == REUSE_STABLE && reuse != nullptr) { // Calculate ideal size with reuse parameters ModelList4k* modelLists[] = { &m_modellist1, &m_modellist2 }; int segmentSizes[] = { splittingPoint, phase1->GetRawSize() - splittingPoint}; int compressedSizes[2] = {}; idealsize = EvaluateSize4k((unsigned char*)phase1->GetPtr(), 2, segmentSizes, compressedSizes, modelLists, CRINKLER_BASEPROB, m_saturate != 0); printf("\nIdeal compressed size of code: %.2f\n", compressedSizes[0] / (float)(BIT_PRECISION * 8)); printf("Ideal compressed size of data: %.2f\n", compressedSizes[1] / (float)(BIT_PRECISION * 8)); printf("Ideal compressed total size: %.2f\n", idealsize / (float)(BIT_PRECISION * 8)); } else { // Full size estimation and hunk reordering bool verbose_models = (m_printFlags & PRINT_MODELS) != 0; InitProgressBar(); idealsize = EstimateModels((unsigned char*)phase1->GetPtr(), phase1->GetRawSize(), splittingPoint, false, m_useTinyHeader, INT_MAX, INT_MAX); if (m_hunktries > 0) { int target_size1, target_size2; EmpiricalHunkSorter::SortHunkList(&m_hunkPool, *m_transform, m_modellist1, m_modellist2, 
m_modellist1k, CRINKLER_BASEPROB, m_saturate != 0, m_hunktries, m_showProgressBar ? &m_windowBar : NULL, m_useTinyHeader, &target_size1, &target_size2); delete phase1; delete phase1Untransformed; m_transform->LinkAndTransform(&m_hunkPool, importSymbol, CRINKLER_CODEBASE, phase1, &phase1Untransformed, &splittingPoint, true); idealsize = EstimateModels((unsigned char*)phase1->GetPtr(), phase1->GetRawSize(), splittingPoint, true, m_useTinyHeader, target_size1, target_size2); } // Hashing time if (!m_useTinyHeader) { best_hashsize = PreviousPrime(m_hashsize / 2) * 2; best_hashsize = OptimizeHashsize((unsigned char*)phase1->GetPtr(), phase1->GetRawSize(), best_hashsize, splittingPoint, m_hashtries); } DeinitProgressBar(); } } if (m_useTinyHeader) { size = Compress1k((unsigned char*)phase1->GetPtr(), phase1->GetRawSize(),data, maxsize, m_modellist1k, sizefill, nullptr); } else { ModelList4k* modelLists[] = { &m_modellist1, &m_modellist2 }; int segmentSizes[] = { splittingPoint, phase1->GetRawSize() - splittingPoint }; size = Compress4k((unsigned char*)phase1->GetPtr(), 2, segmentSizes, data, maxsize, modelLists, m_saturate != 0, CRINKLER_BASEPROB, best_hashsize, sizefill); } if(!m_useTinyHeader && m_compressionType != COMPRESSION_INSTANT) { int sizeIncludingModels = size + m_modellist1.nmodels + m_modellist2.nmodels; float byteslost = sizeIncludingModels - idealsize / (float) (BIT_PRECISION * 8); printf("Real compressed total size: %d\nBytes lost to hashing: %.2f\n", sizeIncludingModels, byteslost); } Hunk *phase2 = FinalLink(header, nullptr, hashHunk, phase1, data, size, splittingPoint, best_hashsize); delete[] data; CompressionReportRecord* csr = phase1->GetCompressionSummary(sizefill, splittingPoint); if(m_printFlags & PRINT_LABELS) VerboseLabels(csr); if(!m_summaryFilename.empty()) HtmlReport(csr, m_summaryFilename.c_str(), *phase1, *phase1Untransformed, sizefill, filename, phase2->GetRawSize(), this); delete csr; delete[] sizefill; fwrite(phase2->GetPtr(), 1, 
phase2->GetRawSize(), outfile); fclose(outfile); printf("\nOutput file: %s\n", filename); printf("Final file size: %d", phase2->GetRawSize()); if (old_filesize) { if (old_filesize != phase2->GetRawSize()) { printf(" (previous size %d)", old_filesize); } else { printf(" (no change)"); } } printf("\n\n"); if (reuseType != REUSE_OFF) { bool write = false; if (reuse == nullptr) { printf("Writing reuse file: %s\n\n", m_reuseFilename.c_str()); write = true; } else if (reuseType == REUSE_IMPROVE) { if (phase2->GetRawSize() < reuse_filesize) { printf("Overwriting reuse file: %s\n\n", m_reuseFilename.c_str()); write = true; delete reuse; } else { printf("Size not better than with reuse parameters - keeping reuse file: %s\n\n", m_reuseFilename.c_str()); } } if (write) { reuse = new Reuse(m_modellist1, m_modellist2, m_hunkPool, best_hashsize); reuse->Save(m_reuseFilename.c_str()); } } if (phase2->GetRawSize() > 128*1024) { Log::Error(filename, "Output file too big. Crinkler does not support final file sizes of more than 128k."); } if (reuse) delete reuse; delete phase1; delete phase1Untransformed; delete phase2; } Hunk *Crinkler::FinalLink(Hunk *header, Hunk *depacker, Hunk *hashHunk, Hunk *phase1, unsigned char *data, int size, int splittingPoint, int hashsize) { Hunk* phase1Compressed = new Hunk("compressed data", (char*)data, 0, 0, size, size); phase1Compressed->AddSymbol(new Symbol("_PackedData", 0, SYMBOL_IS_RELOCATEABLE, phase1Compressed)); Hunk *modelHunk = nullptr; if (!m_useTinyHeader) { header->AddSymbol(new Symbol("_HashTable", CRINKLER_SECTIONSIZE * 2 + phase1->GetRawSize(), SYMBOL_IS_RELOCATEABLE, header)); modelHunk = CreateModelHunk(splittingPoint, phase1->GetRawSize()); } HunkList phase2list; phase2list.AddHunkBack(new Hunk(*header)); if (depacker) phase2list.AddHunkBack(new Hunk(*depacker)); if (hashHunk) phase2list.AddHunkBack(new Hunk(*hashHunk)); if (modelHunk) phase2list.AddHunkBack(modelHunk); phase2list.AddHunkBack(phase1Compressed); Hunk* phase2 = 
phase2list.ToHunk("final", CRINKLER_IMAGEBASE); // Add constants int exports_rva = m_useTinyHeader || m_exports.empty() ? 0 : phase1->FindSymbol("_ExportTable")->value + CRINKLER_CODEBASE - CRINKLER_IMAGEBASE; SetHeaderConstants(phase2, phase1, hashsize, m_modellist1k.boost, m_modellist1k.baseprob0, m_modellist1k.baseprob1, m_modellist1k.modelmask, m_subsystem == SUBSYSTEM_WINDOWS ? IMAGE_SUBSYSTEM_WINDOWS_GUI : IMAGE_SUBSYSTEM_WINDOWS_CUI, exports_rva, m_useTinyHeader); phase2->Relocate(CRINKLER_IMAGEBASE); return phase2; } void Crinkler::PrintOptions(FILE *out) { fprintf(out, " /SUBSYSTEM:%s", m_subsystem == SUBSYSTEM_CONSOLE ? "CONSOLE" : "WINDOWS"); if (m_largeAddressAware) { fprintf(out, " /LARGEADDRESSAWARE"); } if (!m_entry.empty()) { fprintf(out, " /ENTRY:%s", m_entry.c_str()); } if(m_useTinyHeader) { fprintf(out, " /TINYHEADER"); } if(m_useTinyImport) { fprintf(out, " /TINYIMPORT"); } if(!m_useTinyHeader) { fprintf(out, " /COMPMODE:%s", CompressionTypeName(m_compressionType)); if (m_saturate) { fprintf(out, " /SATURATE"); } fprintf(out, " /HASHSIZE:%d", m_hashsize / 1048576); } if (m_compressionType != COMPRESSION_INSTANT) { if(!m_useTinyHeader) { fprintf(out, " /HASHTRIES:%d", m_hashtries); } fprintf(out, " /ORDERTRIES:%d", m_hunktries); } for(int i = 0; i < (int)m_rangeDlls.size(); i++) { fprintf(out, " /RANGE:%s", m_rangeDlls[i].c_str()); } for(const auto& p : m_replaceDlls) { fprintf(out, " /REPLACEDLL:%s=%s", p.first.c_str(), p.second.c_str()); } for (const auto& p : m_fallbackDlls) { fprintf(out, " /FALLBACKDLL:%s=%s", p.first.c_str(), p.second.c_str()); } if (!m_useTinyHeader && !m_useSafeImporting) { fprintf(out, " /UNSAFEIMPORT"); } if (m_transform->GetDetransformer() != NULL) { fprintf(out, " /TRANSFORM:CALLS"); } if (m_truncateFloats) { fprintf(out, " /TRUNCATEFLOATS:%d", m_truncateBits); } if (m_overrideAlignments) { fprintf(out, " /OVERRIDEALIGNMENTS"); if (m_alignmentBits != -1) { fprintf(out, ":%d", m_alignmentBits); } } if (m_unalignCode) { 
fprintf(out, " /UNALIGNCODE"); } if (!m_runInitializers) { fprintf(out, " /NOINITIALIZERS"); } for (const Export& e : m_exports) { if (e.HasValue()) { fprintf(out, " /EXPORT:%s=0x%08X", e.GetName().c_str(), e.GetValue()); } else if (e.GetName() == e.GetSymbol()) { fprintf(out, " /EXPORT:%s", e.GetName().c_str()); } else { fprintf(out, " /EXPORT:%s=%s", e.GetName().c_str(), e.GetSymbol().c_str()); } } } void Crinkler::InitProgressBar() { m_progressBar.AddProgressBar(&m_consoleBar); if(m_showProgressBar) m_progressBar.AddProgressBar(&m_windowBar); m_progressBar.Init(); } void Crinkler::DeinitProgressBar() { m_progressBar.Deinit(); }
22,716
3,783
<reponame>sachinshrestha483/AlgoDS<filename>src/toptal/Second.java
package toptal;

import java.math.BigInteger;
import java.util.Arrays;
import java.util.stream.Collectors;

/**
 * Digit puzzle: concatenate the array elements into one digit string,
 * reverse that string, multiply the resulting number by 17, and return
 * the sum of the decimal digits of the product.
 * A null or empty input yields 0.
 */
public class Second {

    public static void main(String[] args) {
        System.out.println(solution(new int[] { 1 }));
    }

    static public int solution(int[] a) {
        // write your code in Java SE 8
        if (a == null || a.length == 0) {
            return 0;
        }

        // Concatenate every element, then read the digit string back to front.
        StringBuilder digits = new StringBuilder(a.length);
        for (int value : a) {
            digits.append(value);
        }
        String reversed = digits.reverse().toString();

        // BigInteger keeps the arithmetic exact for arbitrarily long inputs.
        BigInteger product = new BigInteger(reversed).multiply(BigInteger.valueOf(17));

        // Sum the decimal digits of the product.
        int digitSum = 0;
        for (char c : product.toString().toCharArray()) {
            digitSum += c - '0';
        }
        return digitSum;
    }
}
437
2,138
<filename>3d/scripts/generate_combined_front_panel.py<gh_stars>1000+
#!/usr/bin/env python3
# Copyright 2021 <NAME> and the splitflap contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Render the combined front panel OpenSCAD model to SVG.

Command-line wrapper around projection_renderer.Renderer: collects
panel-geometry options (rows/cols, spacing, frame size or margins, kerf,
centering mode), maps them onto OpenSCAD variable overrides, and renders
``combined_front_panel.scad`` to SVG under ``build/front_panel``.
"""

from __future__ import division
from __future__ import print_function

import argparse
import logging
import os
import sys

from kerf_presets import KERF_PRESETS
from svg_processor import SvgProcessor
from projection_renderer import Renderer

# Resolve repo-relative paths from this script's own location so the tool
# works regardless of the current working directory.
script_dir = os.path.dirname(os.path.abspath(__file__))
source_parts_dir = os.path.dirname(script_dir)
repo_root = os.path.dirname(source_parts_dir)
sys.path.append(repo_root)

# Maps the --center-mode CLI choice onto the numeric `center_mode` value
# that the OpenSCAD model expects.
CENTER_MODES = {
    'letter': 0,
    'window': 1,
    'module': 2,
}


def render(extra_variables, output_directory):
    """Render combined_front_panel.scad to SVG with the given overrides.

    extra_variables: dict of OpenSCAD variable name -> value.
    output_directory: directory the rendered SVG is written to.
    """
    renderer = Renderer(os.path.join(source_parts_dir, 'combined_front_panel.scad'), output_directory, extra_variables)
    renderer.clean()  # start from a clean output directory
    svg_output = renderer.render_svgs(panelize_quantity = 1)
    logging.info('\n\n\nDone rendering to SVG: ' + svg_output)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)

    parser = argparse.ArgumentParser("")

    # Exactly one way of specifying the cut width must be given.
    kerf_group = parser.add_mutually_exclusive_group(required=True)
    kerf_group.add_argument('--kerf', type=float, help='Set kerf_width value')
    kerf_group.add_argument('--kerf-preset', choices=KERF_PRESETS, help='Set kerf_width using a defined preset')
    kerf_group.add_argument('--tool-diameter', type=float, help='Diameter of cutting tool')

    parser.add_argument('--rows', type=int, required=True, help='Number of rows')
    parser.add_argument('--cols', type=int, required=True, help='Number of columns')

    # Module spacing: either the gap between modules or the center-to-center
    # distance, per axis.
    x_group = parser.add_mutually_exclusive_group(required=True)
    x_group.add_argument('--spacing-x', type=float, help='Horizontal gap between modules')
    x_group.add_argument('--center-center-x', type=float, help='Horizontal center-to-center distance between modules')

    y_group = parser.add_mutually_exclusive_group(required=True)
    y_group.add_argument('--spacing-y', type=float, help='Vertical gap between modules')
    y_group.add_argument('--center-center-y', type=float, help='Vertical center-to-center distance between modules')

    # Overall panel size: either absolute dimensions or margins around the grid.
    width_group = parser.add_mutually_exclusive_group(required=True)
    width_group.add_argument('--width', type=float, help='Width of the panel')
    width_group.add_argument('--frame-margin-x', type=float, help='Margin to add to the left and right sides')

    height_group = parser.add_mutually_exclusive_group(required=True)
    height_group.add_argument('--height', type=float, help='Height of the panel')
    height_group.add_argument('--frame-margin-y', type=float, help='Margin to add to the top and bottom')

    parser.add_argument('--center-mode', choices=CENTER_MODES, required=True, help='Specify how modules should be centered')

    args = parser.parse_args()

    # Translate parsed CLI options into OpenSCAD variable overrides; only
    # options the user actually supplied are forwarded.
    extra_variables = {
        'render_etch': False,
    }
    if args.kerf is not None:
        extra_variables['kerf_width'] = args.kerf
    elif args.kerf_preset is not None:
        extra_variables['kerf_width'] = KERF_PRESETS[args.kerf_preset]
    elif args.tool_diameter is not None:
        extra_variables['tool_diameter'] = args.tool_diameter
    extra_variables['rows'] = args.rows
    extra_variables['cols'] = args.cols
    if args.spacing_x is not None:
        extra_variables['gap_x'] = args.spacing_x
    if args.spacing_y is not None:
        extra_variables['gap_y'] = args.spacing_y
    if args.center_center_x is not None:
        extra_variables['center_center_x'] = args.center_center_x
    if args.center_center_y is not None:
        extra_variables['center_center_y'] = args.center_center_y
    if args.width is not None:
        extra_variables['frame_width'] = args.width
    if args.height is not None:
        extra_variables['frame_height'] = args.height
    if args.frame_margin_x is not None:
        extra_variables['frame_margin_x'] = args.frame_margin_x
    if args.frame_margin_y is not None:
        extra_variables['frame_margin_y'] = args.frame_margin_y
    extra_variables['center_mode'] = CENTER_MODES[args.center_mode]

    output_dir = os.path.join(source_parts_dir, 'build', 'front_panel')
    render(extra_variables, output_dir)
1,700
406
import os
import errno
import string
import random


def mkdir_p(path):
    """Create `path` and any missing parent directories, like `mkdir -p`.

    A no-op when the directory already exists; any other OSError is
    re-raised.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # EEXIST is only acceptable when the existing entry really is a
        # directory (a plain file with the same name is still an error).
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise


def random_string(n):
    """Return a random string of `n` ASCII letters (a-z, A-Z).

    Note: uses the non-cryptographic `random` module; do not use the
    result as a secret token.
    """
    # `range` (not the Python-2-only `xrange`) keeps this working on
    # Python 3; str.join avoids quadratic string concatenation.
    return "".join(random.choice(string.ascii_letters) for _ in range(n))
198
1,362
<reponame>BrimmingDev/twilio-python<filename>tests/integration/numbers/v2/regulatory_compliance/test_regulation.py # coding=utf-8 r""" This code was generated by \ / _ _ _| _ _ | (_)\/(_)(_|\/| |(/_ v1.0.0 / / """ from tests import IntegrationTestCase from tests.holodeck import Request from twilio.base.exceptions import TwilioException from twilio.http.response import Response class RegulationTestCase(IntegrationTestCase): def test_list_request(self): self.holodeck.mock(Response(500, '')) with self.assertRaises(TwilioException): self.client.numbers.v2.regulatory_compliance \ .regulations.list() self.holodeck.assert_has_request(Request( 'get', 'https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations', )) def test_read_empty_response(self): self.holodeck.mock(Response( 200, ''' { "results": [], "meta": { "page": 0, "page_size": 50, "first_page_url": "https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations?IsoCountry=US&EndUserType=business&NumberType=mobile&PageSize=50&Page=0", "previous_page_url": null, "url": "https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations?IsoCountry=US&EndUserType=business&NumberType=mobile&PageSize=50&Page=0", "next_page_url": null, "key": "results" } } ''' )) actual = self.client.numbers.v2.regulatory_compliance \ .regulations.list() self.assertIsNotNone(actual) def test_read_full_response(self): self.holodeck.mock(Response( 200, ''' { "results": [ { "sid": "RNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "friendly_name": "Australia: Local - Individual", "iso_country": "AU", "number_type": "local", "end_user_type": "individual", "requirements": { "end_user": [ { "name": "Individual", "type": "individual", "url": "https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations/individual", "fields": [ "first_name", "last_name" ] } ], "supporting_document": [ [ { "name": "Address", "type": "document", "description": "The physical location of the individual or business. 
Must be within locality or region covered by the phone numbers prefix; a PO Box is not acceptable where a local address is required.", "accepted_documents": [ { "name": "Address Validation", "type": "address", "url": "https://numbers.twilio.com/v2/RegulatoryCompliance/DocumentTypes/address", "fields": [] } ] } ] ] }, "url": "https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations/RNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } ], "meta": { "page": 0, "page_size": 50, "first_page_url": "https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations?PageSize=50&Page=0", "previous_page_url": null, "url": "https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations?PageSize=50&Page=0", "next_page_url": null, "key": "results" } } ''' )) actual = self.client.numbers.v2.regulatory_compliance \ .regulations.list() self.assertIsNotNone(actual) def test_fetch_request(self): self.holodeck.mock(Response(500, '')) with self.assertRaises(TwilioException): self.client.numbers.v2.regulatory_compliance \ .regulations("RNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch() self.holodeck.assert_has_request(Request( 'get', 'https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations/RNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', )) def test_fetch_response(self): self.holodeck.mock(Response( 200, ''' { "sid": "RNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "friendly_name": "Australia: Local - Individual", "iso_country": "AU", "number_type": "local", "end_user_type": "individual", "requirements": { "end_user": [ { "name": "Individual", "type": "individual", "url": "https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations/individual", "fields": [ "first_name", "last_name" ] } ], "supporting_document": [ [ { "name": "Address", "type": "document", "description": "The physical location of the individual or business. 
Must be within locality or region covered by the phone numbers prefix; a PO Box is not acceptable where a local address is required.", "accepted_documents": [ { "name": "Address Validation", "type": "address", "url": "https://numbers.twilio.com/v2/RegulatoryCompliance/DocumentTypes/address", "fields": [] } ] } ] ] }, "url": "https://numbers.twilio.com/v2/RegulatoryCompliance/Regulations/RNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } ''' )) actual = self.client.numbers.v2.regulatory_compliance \ .regulations("RNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch() self.assertIsNotNone(actual)
4,664
1,010
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.jet.examples.files.avro;

import com.hazelcast.jet.Jet;
import com.hazelcast.jet.JetInstance;
import com.hazelcast.jet.avro.AvroSinks;
import com.hazelcast.jet.pipeline.Pipeline;
import com.hazelcast.jet.pipeline.Sources;
import com.hazelcast.map.IMap;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;

/**
 * Demonstrates dumping a map's values to an Apache Avro file.
 */
public class AvroSink {

    /** Name of the IMap whose values are exported. */
    public static final String MAP_NAME = "userMap";
    /** Output directory for the Avro files. */
    public static final String DIRECTORY_NAME;

    static {
        // Derive the output directory from the classpath root: two levels up
        // from it, in a "users" subdirectory.
        Path path = Paths.get(AvroSink.class.getClassLoader().getResource("").getPath());
        DIRECTORY_NAME = path.getParent().getParent().toString() + "/users";
    }

    private JetInstance jet;

    /**
     * Builds the Jet pipeline: read (key, User) entries from the map, keep
     * only the values, and write them as Avro files using the User schema.
     */
    private static Pipeline buildPipeline() {
        Pipeline p = Pipeline.create();
        Schema schema = schemaForUser();
        p.readFrom(Sources.<String, User>map(MAP_NAME))
         .map(Map.Entry::getValue)
         .writeTo(AvroSinks.files(DIRECTORY_NAME, User.class, schema));
        return p;
    }

    public static void main(String[] args) throws Exception {
        new AvroSink().go();
    }

    // Populates the sample data, runs the pipeline to completion, and always
    // shuts Jet down afterwards.
    private void go() {
        try {
            setup();
            jet.newJob(buildPipeline()).join();
        } finally {
            Jet.shutdownAll();
        }
    }

    // Fills the map with 100 sample users, keyed by username.
    private void setup() {
        jet = Jet.bootstrappedInstance();
        IMap<String, User> map = jet.getMap(MAP_NAME);
        for (int i = 0; i < 100; i++) {
            User user = new User("User" + i, "pass" + i, i, i % 2 == 0);
            map.put(user.getUsername(), user);
        }
    }

    // Builds the Avro record schema matching the four User fields; every
    // field is required (noDefault).
    private static Schema schemaForUser() {
        return SchemaBuilder.record(User.class.getSimpleName())
                .namespace(User.class.getPackage().getName())
                .fields()
                .name("username").type().stringType().noDefault()
                .name("password").type().stringType().noDefault()
                .name("age").type().intType().noDefault()
                .name("status").type().booleanType().noDefault()
                .endRecord();
    }
}
1,222
335
{ "word": "Braunschweiger", "definitions": [ "A variety of smoked liver sausage." ], "parts-of-speech": "Noun" }
62
2,326
/** * Copyright (c) 2016-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE_render file in the root directory of this subproject. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ #pragma once #include <vector> #include "CameraIsp.h" #include "opencv2/imgproc.hpp" namespace surround360 { namespace color_calibration { using namespace std; using namespace cv; struct ColorPatch { Point2f centroid; Mat mask; Vec3f rgbMedian; Vec3f labMedian; }; struct ColorResponse { Vec3f rgbInterceptXMax; Vec3f rgbInterceptXMin; Vec3f rgbInterceptY; Vec3f rgbSlope; }; // Reference grayscale values for plotting purposes const vector<int> rgbGrayLinearMacbeth = {6, 21, 49, 92, 150, 233}; // MacBeth patches from // <NAME>, "RGB coordinates of the Macbeth ColorChecker", The BabelColor Company, June 2006 map<string, vector<vector<float> > > const labMacbeth { {"D50", { {37.99, 13.56, 14.06}, {65.71, 18.13, 17.81}, {49.93, -4.88, -21.93}, {43.14, -13.10, 21.91}, {55.11, 8.84, -25.40}, {70.72, -33.40, -0.199}, {62.66, 36.07, 57.10}, {40.02, 10.41, -45.96}, {51.12, 48.24, 16.25}, {30.33, 22.98, -21.59}, {72.53, -23.71, 57.26}, {71.94, 19.36, 67.86}, {28.78, 14.18, -50.30}, {55.26, -38.34, 31.37}, {42.10, 53.38, 28.19}, {81.73, 4.04, 79.82}, {51.94, 49.99, -14.57}, {51.04, -28.63, -28.64}, {96.54, -0.425, 1.186}, {81.26, -0.638, -0.335}, {66.77, -0.734, -0.504}, {50.87, -0.153, -0.270}, {35.66, -0.421, -1.231}, {20.46, -0.079, -0.973}}}, {"D65", { {37.85, 12.72, 14.07}, {65.43, 17.18, 17.21}, {50.15, -1.91, -21.79}, {43.17, -15.08, 22.44}, {55.40, 11.58, -25.06}, {70.92, -33.22, 0.29}, {62.06, 33.37, 56.24}, {40.59, 16.15, -45.14}, {50.58, 47.55, 15.17}, {30.51, 25.11, -21.74}, {72.31, -27.84, 57.83}, {71.43, 15.50, 67.80}, {29.46, 20.74, -49.34}, {55.26, -41.23, 32.03}, {41.53, 52.67, 26.92}, {81.08, -0.33, 80.10}, {51.74, 51.26, -15.48}, {52.41, -18.46, -26.64}, 
{96.49, -0.35, 0.96}, {81.17, -0.69, -0.24}, {66.84, -0.71, -0.25}, {50.86, 0.20, -0.55}, {35.61, -0.36, -1.44}, {20.40, 0.47, -1.27}} } }; // Get image bit depth int getBitsPerPixel(const Mat& image); // Loads the given JSON file into a usable string string getJson(const string& filename); // Loads raw image using the ISP setup, so output is 16-bit and [0..1] Mat getRaw(const string& ispConfigFile, const Mat& image); // Created a grayscale map where crushed pixels are set to 0 and saturated // pixels are set to 1. Assuming 8-bit input (it's just for visualization) Mat findClampedPixels(const Mat& image8); // Computes color channel responses from the grayscale patches on the // colorchecker ColorResponse computeRGBResponse( const Mat& raw, const bool isRaw, vector<ColorPatch>& colorPatches, const string& ispConfigFile, const bool saveDebugImages, const string& outputDir, int& stepDebugImages, const string& titleExtra); // Saves black level of each channel to text file void saveBlackLevel(const Vec3f& blackLevel, const string& outputDir); // Saves X-intercepts of each channel for the given RGB response. Saves values // to text file void saveXIntercepts(const ColorResponse& colorResponse, const string& outputDir); // Generates ISP config file with all the given parameters void writeIspConfigFile( const string& ispConfigFileOut, CameraIsp& cameraIsp, const Vec3f& blackLevel, const Vec3f& whiteBalanceGain, const Mat& ccm, const Vec3f& gamma); // Updates input ISP config file with clamp values void updateIspWithClamps( const string& ispConfigFilePath, const int bpp, const Vec3f& clampMin, const Vec3f& clampMax); // Finds black level. 
Assumes there's a black hole in the input image Vec3f findBlackLevel( const Mat& raw16, const int minNumPixels, const string& ispConfigFile, const bool saveDebugImages, const string& outputDir, int& stepDebugImages); // Computes one dimensional histogram of input image Mat computeHistogram(const Mat& image, const Mat& mask); // Detects color chart patches on the input image. Returns a list of color // patches containing location, shape and color information vector<ColorPatch> detectColorChart( const Mat& image, const int numSquaresW, const int numSquaresH, const float minAreaChart, const float maxAreaChart, const bool saveDebugImages, const string& outputDir, int& stepDebugImages); // Fills gaps in input binary image Mat fillGaps( const Mat& imageBw, const float elementSize, const bool saveDebugImages, const string& outputDir, int& stepDebugImages); // Dilates gaps to avoid outliers on contour detection Mat dilateGaps( const Mat& imageBw, const float elementSize, const bool saveDebugImages, const string& outputDir, int& stepDebugImages); // Creates structuring element of given shape for morphological operations Mat createMorphElement( const Size imageSize, const float elementSize, const int shape); // Removes small objects Mat removeSmallObjects( const Mat& imageBw, const float smallestObjectSize, const bool saveDebugImages, const string& outputDir, int& stepDebugImages); // Finds straight contours on input image vector<vector<Point>> findContours( const Mat& image, const bool saveDebugImages, const string& outputDir, int& stepDebugImages, const float straightenFactor); // Removes outliers from given color patch list vector<ColorPatch> removeContourOutliers(vector<ColorPatch> colorPatchList); // Sorts patches from top left to bottom right vector<ColorPatch> sortPatches( const vector<ColorPatch>& colorPatchList, const int numSquaresW, const Size imageSize); // Finds the point closest to the top-left corner of the image Point2f findTopLeft(const vector<Point2f>& 
points); // Finds the point closest to the top-right corner of the image Point2f findTopRight(const vector<Point2f>& points, const int imageWidth); // Finds the distance from a point to a line defined by two points float pointToLineDistance( const Point2f p, const Point2f pLine1, const Point2f pLine2); // Draws color patches on top of given image Mat drawPatches(const Mat& image, vector<ColorPatch>& colorPatches); // Computes RGB medians of each given color patch void computeRGBMedians( vector<ColorPatch>& colorPatches, const Mat& bgr, const bool isRaw, const string& ispConfigFile); // Computes RGB medians on given mask Vec3f getRgbMedianMask( const Mat& image, const Mat& mask, const string& ispConfigFile, const bool isRaw); Vec3f plotGrayPatchResponse( vector<ColorPatch>& colorPatches, const Mat& rgb, const bool isRaw, const string& ispConfigFile, const string& titleExtra, const string& outputDir, int& stepDebugImages); // Calculates black level, white balance and CCM from given color patches void obtainIspParams( vector<ColorPatch>& colorPatches, const string& illuminant, const Size& imageSize, const bool isBlackLevelSet, const bool saveDebugImages, const string& outputDir, int& stepDebugImages, Vec3f& blackLevel, Vec3f& whiteBalance, Mat& ccm); // Compute DeltaE errors between corrected color patches and MacBeth ground // truth (Lab) void computeColorPatchErrors( const vector<ColorPatch>& colorPatches, const string& illuminant, const string& outputDir, const string& titleExtra); } // namespace color_calibration } // namespace surround360
2,775
4,036
<reponame>timoles/codeql<filename>python/ql/test/library-tests/PointsTo/regressions/wrong/module-imports/conflict-stdlib/code-invalid-package-name/cmd.py
# Test fixture: a user module named `cmd` that shadows the stdlib `cmd`
# module (per its path, part of a CodeQL points-to regression test).
# The print-on-import makes it observable which module actually loaded.
foo = "Foo"
print("my own cmd imported")
71
511
#ifndef __CONFIG_H__ #define __CONFIG_H__ //!< TizenRT Macro #define OK 0 //!< Features #define CONFIG_UI #define CONFIG_UI_DISPLAY_RGB888 #define CONFIG_UI_ENABLE_TOUCH #define CONFIG_UI_ENABLE_EMOJI //!< Values #define CONFIG_UI_TOUCH_THRESHOLD (10) #define CONFIG_UI_DISPLAY_WIDTH (360) #define CONFIG_UI_DISPLAY_HEIGHT (360) #define CONFIG_UI_STACK_SIZE (8192) #define CONFIG_UI_UPDATE_MEMPOOL_SIZE (128) #define CONFIG_UI_MAXIMUM_FPS (30) #define CONFIG_UI_DISPLAY_SCALE (1) #endif
246
473
<filename>third-party/qemu-orp/hw/openrisc/ul/ul576_0.h /* * AUTOGENERATED July 2014 Copyright 2015, Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #ifndef __UL576_0__ #define __UL576_0__ #include "ul32.h" /* * ul576 */ typedef struct ul576_s { uint32_t x[18]; } ul576[1]; typedef ul576 tidl_ul576_t; void ul576_init(ul576 x); void ul576_clear(ul576 x); typedef struct mod576_s { ul576 n; uint32_t np; ul576 rsq; } mod576[1]; /* * Print a ul576 to stdout */ void ul576_print(const ul576 src); /* * Returns 1 if the given ul576 is zero, else returns 0 */ int ul576_iszero(const ul576 src); /* * Setters */ void ul576_set(ul576 dst, const ul576 src); /* * Set all of the limbs of a ul576 at once */ void ul576_set_fullui(ul576 dst, uint32_t l17, uint32_t l16, uint32_t l15, uint32_t l14, uint32_t l13, uint32_t l12, uint32_t l11, uint32_t l10, uint32_t l9, uint32_t l8, uint32_t l7, uint32_t l6, uint32_t l5, uint32_t l4, uint32_t l3, uint32_t l2, uint32_t l1, uint32_t l0); /* * Set a ul576 to a uint32_t */ void ul576_set_ui(ul576 dst, uint32_t i); /* * Get a uint32_t out of a ul576 */ uint32_t ul576_get_ui(const ul576 src); /* * Compare two ul576's */ int ul576_cmp(const ul576 src1, const ul576 src2); /* * Compare a ul576 with a uint32_t */ int ul576_cmp_ui(const ul576 src1, uint32_t src2); /* * Yields 1 + index of the most significant bit */ uint32_t ul576_msb(const ul576 src); /* * Returns 1 if (1 << i) & src > 0, else returns 0 */ uint32_t ul576_testbit(uint32_t i, const ul576 src); /* * Sets the 
indicated bit, as per (1 << i) */ void ul576_setbit(uint32_t i, ul576 src); /* * Add two ul576's */ void ul576_add(ul576 dst, const ul576 src1, const ul576 src2); /* * Sub two ul576's */ void ul576_sub(ul576 dst, const ul576 src1, const ul576 src2); /* * Mul two ul576's */ void ul576_mul(ul576 dst, const ul576 src1, const ul576 src2); /* * Initialize mod576 */ void mod576_init(mod576 n); /* * Add two ul576's modulo another */ void ul576_modadd(ul576 dst, const ul576 src1, const ul576 src2, const mod576 n); /* * Subtract one ul576 from another modulo a third */ void ul576_modsub(ul576 dst, const ul576 src1, const ul576 src2, const mod576 n); /* * Mul two ul576's modulo a third, followed by Montgomery reduction */ void ul576_modmul(ul576 _dst, const ul576 _src1, const ul576 _src2, const mod576 n); /* * Convert a ul576 into Montgomery form */ void ul576_to_montgomery(ul576 dst, const ul576 src, const mod576 mod); /* * Convert a ul576 out-of Montgomery form */ void ul576_from_montgomery(ul576 dst, const ul576 src, const mod576 mod); /* * Right-shift a ul576 by some number of bits */ void ul576_rshift(ul576 dst, const ul576 src, int shift); /* * Left shift a ul576 by some number of words */ void ul576_lshiftw(ul576 dst, const ul576 src, int w); /* * Multiply a ul576 by a uint32_t */ void ul576_mulu32(ul576 dst, const ul576 src, uint32_t x); #endif
1,329
5,168
<reponame>Olalaye/MegEngine /** * \file dnn/test/common/conv_pooling.h * MegEngine is Licensed under the Apache License, Version 2.0 (the "License") * * Copyright (c) 2014-2021 Megvii Inc. All rights reserved. * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ #pragma once #include "megdnn/basic_types.h" #include "megdnn/opr_param_defs.h" namespace megdnn { namespace test { namespace conv_pooling { struct TestArg { param::ConvPooling param; TensorShape src, filter, bias; TestArg(param::ConvPooling param, TensorShape src, TensorShape filter, TensorShape bias) : param(param), src(src), filter(filter), bias(bias) {} }; std::vector<TestArg> get_args(); } // namespace conv_pooling } // namespace test } // namespace megdnn // vim: syntax=cpp.doxygen
341
653
<gh_stars>100-1000 public class MaxMinArray { public static void main(String [] args){ int vetor[] = {9, 0, 4, 2, 3, 8, 7, 1, 6, 5}; MaxMinRecursivo(vetor, vetor[0], vetor[0], 0); System.out.println("---------"); MaxMinIterativo(vetor); } public static void MaxMinRecursivo(int vetor[], int max, int min, int indice){ if(vetor[indice] > max) max = vetor[indice]; if(vetor[indice] < min) min = vetor[indice]; if(indice < vetor.length-1) MaxMinRecursivo(vetor, max, min, indice+1); else{ System.out.println("Max : " + max); System.out.println("Min : " + min); } } public static void MaxMinIterativo(int vetor[]){ int max = vetor[0], min = vetor[0]; if( vetor.length > 1){ for (int i = 0; i < vetor.length; i++) { if(vetor[i] > max) max = vetor[i]; else if(vetor[i] < min) min = vetor[i]; } } System.out.println("Max : " + max); System.out.println("Min : " + min); } }
441
348
{"nom":"Lachapelle-sous-Gerberoy","circ":"2ème circonscription","dpt":"Oise","inscrits":107,"abs":35,"votants":72,"blancs":8,"nuls":3,"exp":61,"res":[{"nuance":"REM","nom":"<NAME>","voix":37},{"nuance":"FN","nom":"<NAME>","voix":24}]}
96
1,444
<reponame>J-VOL/mage<gh_stars>1000+ package mage.cards.m; import mage.MageInt; import mage.abilities.Ability; import mage.abilities.common.AttacksTriggeredAbility; import mage.abilities.effects.common.UntapTargetEffect; import mage.abilities.keyword.CrewAbility; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.SubType; import mage.filter.common.FilterControlledPermanent; import mage.filter.predicate.Predicates; import mage.filter.predicate.mageobject.AnotherPredicate; import mage.target.common.TargetControlledPermanent; import java.util.UUID; /** * * @author Styxo */ public final class MobileGarrison extends CardImpl { private static final FilterControlledPermanent filter = new FilterControlledPermanent("another target artifact or creature you control"); static { filter.add(AnotherPredicate.instance); filter.add(Predicates.or( CardType.ARTIFACT.getPredicate(), CardType.CREATURE.getPredicate() )); } public MobileGarrison(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.ARTIFACT}, "{3}"); this.subtype.add(SubType.VEHICLE); this.power = new MageInt(3); this.toughness = new MageInt(4); // Whenever Mobile Garrison attacks, untap another target artifact or creature you control. Ability ability = new AttacksTriggeredAbility(new UntapTargetEffect(), false); ability.addTarget(new TargetControlledPermanent(filter)); this.addAbility(ability); // Crew 2 this.addAbility(new CrewAbility(2)); } private MobileGarrison(final MobileGarrison card) { super(card); } @Override public MobileGarrison copy() { return new MobileGarrison(this); } }
661
493
<reponame>SirArep/ecal<filename>app/rec/rec_server_core/src/recorder/local_recorder.h /* ========================= eCAL LICENSE ================================= * * Copyright (C) 2016 - 2019 Continental Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ========================= eCAL LICENSE ================================= */ #pragma once #include "abstract_recorder.h" #include <ThreadingUtils/InterruptibleLoopThread.h> #include <memory> namespace eCAL { namespace rec { class EcalRec; } namespace rec_server { class LocalRecorder : public AbstractRecorder, protected InterruptibleLoopThread { ////////////////////////////////////// // Constructor & Destructor ////////////////////////////////////// public: explicit LocalRecorder(const std::string& hostname , const std::shared_ptr<eCAL::rec::EcalRec>& ecal_rec_instance , const std::function<void(const std::string& hostname, const eCAL::rec::RecorderStatus& recorder_status)>& update_jobstatus_function , const std::function<void(int64_t job_id, const std::string& hostname, const std::pair<bool, std::string>& info_command_response)>& report_job_command_response_callback , const RecorderSettings& initial_settings); ~LocalRecorder(); ////////////////////////////////////// // Interruptible Thread overrrides ////////////////////////////////////// protected: void Loop() override; ////////////////////////////////////// // Public API ////////////////////////////////////// public: virtual void SetRecorderEnabled(bool enabled, bool 
connect_to_ecal = false) override; virtual bool IsRecorderEnabled() const override; virtual bool EverParticipatedInAMeasurement() const override; virtual void SetSettings(const RecorderSettings& settings) override; virtual void SetCommand(const RecorderCommand& command) override; virtual bool IsAlive() const override; virtual std::pair<eCAL::rec::RecorderStatus, eCAL::Time::ecal_clock::time_point> GetStatus() const override; virtual bool IsRequestPending() const override; virtual void WaitForPendingRequests() const override; virtual std::pair<bool, std::string> GetLastResponse() const override; ////////////////////////////////////// // Member Variables ////////////////////////////////////// private: mutable std::mutex ecal_rec_instance_and_status_mutex_; std::shared_ptr<eCAL::rec::EcalRec> ecal_rec_instance_; eCAL::rec::RecorderStatus last_status_; std::atomic<bool> ever_participated_in_a_measurement_; bool recorder_enabled_; bool is_in_sync_; std::pair<bool, std::string> last_response_; RecorderSettings complete_settings_; bool should_be_connected_to_ecal_; }; } }
1,196
716
/* * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. * See https://llvm.org/LICENSE.txt for license information. * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception * */ #include "mth_intrinsics.h" #if defined(TARGET_LINUX_POWER) #include <altivec.h> #elif defined(TARGET_ARM64) #include "arm64intrin.h" #elif defined(LINUX8664) || defined(TARGET_OSX_X8664) #include <immintrin.h> #else #error Unknown architecture #endif #if PRECSIZE == 4 #define PREC s #define FLOAT float #else #define PREC d #define FLOAT double #endif #define CONCAT4_(a,b,c,d) a##b##c##d #define CONCAT4(a,b,c,d) CONCAT4_(a,b,c,d) #define CONCAT5_(a,b,c,d,e) a##b##c##d##e #define CONCAT5(a,b,c,d,e) CONCAT5_(a,b,c,d,e) #define EXPAND(a) a #define VFLOAT CONCAT4(vr,PREC,VLEN,_t) #define VINT CONCAT4(vi,PREC,VLEN,_t) #ifndef TARGET_OSX_X8664 #define VFLOATRETURN CONCAT4(__mth_return2,VFLOAT,,) #else /* * OSX does not support weak aliases - so just use the generic for all * vector types. */ #define VFLOATRETURN __mth_return2vectors #endif #define GENERICNAME CONCAT4(__g,PREC,_sincos_,VLEN) #define GENERICNAMEMASK CONCAT5(__g,PREC,_sincos_,VLEN,m) #if defined(LINUX8664) || defined(TARGET_OSX_X8664) #define _s_VL_4 #define _d_VL_2 #define _s_VL_8 256 #define _d_VL_4 256 #define _s_VL_16 512 #define _d_VL_8 512 #define __VLSIZE(_prec,_vlen) _##_prec##_VL_##_vlen #define _VLSIZE(_prec,_vlen) __VLSIZE(_prec,_vlen) #define VEC_LOAD(_a) (VFLOAT)CONCAT4(_mm,_VLSIZE(PREC,VLEN),_load_p,PREC)((FLOAT *)_a) #elif defined(TARGET_LINUX_POWER) /* * POWER intrinsic does note seems to accept (double *) as an address in vec_ld(). * Thus make the argument always look like a (float *). 
*/ #define VEC_LOAD(_a) (VFLOAT) vec_ld(0, (float *)_a) #elif defined(TARGET_ARM64) #define VEC_LOAD(_a) (VFLOAT) vec_ld(0, (float *)_a) #else #error Unknown architecture #endif extern "C" void sincos(double, double*, double*) throw(); extern "C" VFLOAT VFLOATRETURN(VFLOAT, VFLOAT); extern "C" VFLOAT GENERICNAME(VFLOAT x) { int i; FLOAT ts[VLEN] __attribute__((__aligned__(16))); FLOAT tc[VLEN] __attribute__((__aligned__(16))); for (i = 0 ; i < VLEN; i++) SINCOS(x[i], &ts[i], &tc[i]); return VFLOATRETURN(VEC_LOAD(&ts), VEC_LOAD(&tc)); } extern "C" VFLOAT GENERICNAMEMASK(VFLOAT x, VINT mask) { int i; FLOAT ts[VLEN] __attribute__((__aligned__(16))); FLOAT tc[VLEN] __attribute__((__aligned__(16))); for (i = 0 ; i < VLEN; i++) { if (mask[i] != 0) { SINCOS(x[i], &ts[i], &tc[i]); } } return VFLOATRETURN(VEC_LOAD(&ts), VEC_LOAD(&tc)); }
1,258
1,013
/*! @authors <NAME> (<EMAIL>) @date 2014-2020 @copyright BSD-3-Clause */ #include <gtest/gtest.h> #include <pyclustering/container/adjacency.hpp> #include <algorithm> using namespace pyclustering::container; void template_set_connection(adjacency_collection & collection) { for (size_t i = 0; i < collection.size(); i++) { for (size_t j = i + 1; j < collection.size(); j++) { ASSERT_FALSE(collection.has_connection(i, j)); collection.set_connection(i, j); ASSERT_TRUE(collection.has_connection(i, j)); ASSERT_FALSE(collection.has_connection(j, i)); collection.set_connection(j, i); ASSERT_TRUE(collection.has_connection(j, i)); } } } void template_has_no_connection(adjacency_collection & collection) { for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { ASSERT_FALSE(collection.has_connection(i, j)); } } } void template_has_all_connection(adjacency_collection & collection) { for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { collection.set_connection(i, j); } } for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { ASSERT_TRUE(collection.has_connection(i, j)); } } } void template_erase_connection(adjacency_collection & collection) { for (size_t i = 0; i < collection.size(); i++) { for (size_t j = i + 1; j < collection.size(); j++) { collection.set_connection(i, j); collection.set_connection(j, i); } } for (size_t i = 0; i < collection.size(); i++) { for (size_t j = i + 1; j < collection.size(); j++) { ASSERT_TRUE(collection.has_connection(i, j)); ASSERT_TRUE(collection.has_connection(j, i)); collection.erase_connection(i, j); ASSERT_FALSE(collection.has_connection(i, j)); ASSERT_TRUE(collection.has_connection(j, i)); collection.erase_connection(j, i); ASSERT_FALSE(collection.has_connection(i, j)); ASSERT_FALSE(collection.has_connection(j, i)); } } } void template_get_neighbors_sizes(adjacency_collection & collection) { std::vector<size_t> 
node_neighbors; for (size_t i = 0; i < collection.size(); i++) { for (size_t j = i + 1; j < collection.size(); j++) { collection.set_connection(i, j); collection.set_connection(j, i); collection.get_neighbors(i, node_neighbors); ASSERT_EQ(j, node_neighbors.size()); collection.get_neighbors(j, node_neighbors); ASSERT_EQ(i + 1, node_neighbors.size()); } } } void template_get_neighbors_indexes(adjacency_collection & collection) { std::vector<size_t> node_neighbors; for (size_t i = 0; i < collection.size(); i++) { for (size_t j = i + 1; j < collection.size(); j++) { collection.set_connection(i, j); collection.set_connection(j, i); } } for (size_t i = 0; i < collection.size(); i++) { collection.get_neighbors(i, node_neighbors); ASSERT_EQ(collection.size() - 1, node_neighbors.size()); std::vector<bool> index_neighbor_checker(collection.size(), false); for (size_t j = 0; j < node_neighbors.size(); j++) { size_t neighbor_index = node_neighbors[j]; index_neighbor_checker[neighbor_index] = true; } for (size_t j = 0; j < node_neighbors.size(); j++) { if (i != j) { ASSERT_TRUE(index_neighbor_checker[j]); } else { ASSERT_FALSE(index_neighbor_checker[i]); } } } } void template_no_get_neighbors(adjacency_collection & collection) { std::vector<size_t> node_neighbors; for (size_t i = 0; i < collection.size(); i++) { collection.get_neighbors(i, node_neighbors); ASSERT_EQ(0U, node_neighbors.size()); } } void template_all_get_neighbors(adjacency_collection & collection) { std::vector<size_t> node_neighbors; for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { collection.set_connection(i, j); } } for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { collection.get_neighbors(i, node_neighbors); ASSERT_EQ(collection.size(), node_neighbors.size()); std::sort(node_neighbors.begin(), node_neighbors.end()); for (size_t index = 0; index < collection.size(); index++) { ASSERT_EQ(index, 
node_neighbors[index]); } } } } void template_get_neighbors_after_erase(adjacency_collection & collection) { /* full insert */ for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { collection.set_connection(i, j); } } /* full erase */ for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { collection.erase_connection(i, j); } } /* check that there is no neighbors */ for (size_t i = 0; i < collection.size(); i++) { std::vector<size_t> node_neighbors; collection.get_neighbors(i, node_neighbors); ASSERT_EQ(0U, node_neighbors.size()); } } void template_set_weight_connection(adjacency_weight_collection & collection) { for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { ASSERT_EQ(0.0, collection.get_connection_weight(i, j)); ASSERT_FALSE(collection.has_connection(i, j)); const double weight = (double) i + (double) j / 10.0 + 1.0; collection.set_connection_weight(i, j, weight); ASSERT_EQ(weight, collection.get_connection_weight(i, j)); ASSERT_TRUE(collection.has_connection(i, j)); } } } void template_set_default_weight_connection(adjacency_weight_collection & collection) { for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { ASSERT_EQ(0.0, collection.get_connection_weight(i, j)); ASSERT_FALSE(collection.has_connection(i, j)); collection.set_connection(i, j); ASSERT_NE(0.0, collection.get_connection_weight(i, j)); ASSERT_TRUE(collection.has_connection(i, j)); } } } void template_set_negative_weight(adjacency_weight_collection & collection) { for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { ASSERT_EQ(0.0, collection.get_connection_weight(i, j)); ASSERT_FALSE(collection.has_connection(i, j)); collection.set_connection_weight(i, j, -1.0); ASSERT_EQ(-1.0, collection.get_connection_weight(i, j)); ASSERT_TRUE(collection.has_connection(i, j)); } } } void 
template_get_neighbors_positive_negative(adjacency_weight_collection & collection) { for (size_t i = 0; i < collection.size(); i++) { for (size_t j = 0; j < collection.size(); j++) { if (i % 2 == 0) { collection.set_connection_weight(i, j, 10.0); } else { collection.set_connection_weight(i, j, -10.0); } } } for (size_t i = 0; i < collection.size(); i++) { std::vector<size_t> node_neighbors; collection.get_neighbors(i, node_neighbors); ASSERT_EQ(collection.size(), node_neighbors.size()); } }
3,983
384
<reponame>AlanCheen/PracticeDemo package com.example.concurrent; /** * Created by 程序亦非猿 on 15/10/24. */ public class WaitNotify { public int money = 0; public void buy() { synchronized (this) { try { System.out.println("try to buy Thread:"+Thread.currentThread().getName()); /** * NOTICE if的话,会出现负数 * NOTICE while的话每次都会判断 不会出现金钱为负数 * 因为if直接下去了,while再notify后还会进入条件判断 */ // if (money <= 0) { while (money == 0) { System.out.println("屌丝!请先赚钱!~"); wait(); } money = money - 100; System.out.println("土豪!购买成功!~~还剩下:" + money+";;Thread:"+Thread.currentThread().getName()); } catch (InterruptedException e) { e.printStackTrace(); } } } public void earn() { synchronized (this) { money = 500; /** * 如果只notify 那么购买成功的次数机会不可能是5 屌丝!请先赚钱!~ 屌丝!请先赚钱!~ 屌丝!请先赚钱!~ 屌丝!请先赚钱!~ 屌丝!请先赚钱!~ 土豪!购买成功!~~还剩下:400 */ // notify(); /** * notifyAll 则能购买成功五次 一定!~ * **/ notifyAll(); } } public static final Object lock = new Object() ; private boolean open = false; public int test() { // synchronized (lock) { //这样会报错 synchronized (this) { while (!open) { try { wait(); } catch (InterruptedException e) { e.printStackTrace(); } } } return 1; } public static void main(String[] args) { final WaitNotify waitNotify = new WaitNotify(); // testWaitNotify(waitNotify); new Thread(new Runnable() { @Override public void run() { int a = waitNotify.test(); } }).start(); } private static void testWaitNotify(final WaitNotify waitNotify) { for (int i = 0; i < 7; i++) { new Thread(new Runnable() { @Override public void run() { waitNotify.buy(); } }).start(); } // try { // Thread.sleep(1000); // } catch (InterruptedException e) { // e.printStackTrace(); // } waitNotify.earn(); } }
1,709
1,336
<filename>demo/demo-schema/src/main/java/org/apache/servicecomb/demo/server/WrappedAbstractModel.java /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.demo.server; import java.util.List; import java.util.Map; public class WrappedAbstractModel { private String name; private Map<Long, AbstractModel> mapModel; private List<AbstractModel> listModel; private AbstractModel model; public String getName() { return name; } public void setName(String name) { this.name = name; } public Map<Long, AbstractModel> getMapModel() { return mapModel; } public void setMapModel(Map<Long, AbstractModel> mapModel) { this.mapModel = mapModel; } public List<AbstractModel> getListModel() { return listModel; } public void setListModel(List<AbstractModel> listModel) { this.listModel = listModel; } public AbstractModel getModel() { return model; } public void setModel(AbstractModel model) { this.model = model; } }
511
487
/* * Copyright 2012-2014 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.intellij.erlang.debugger.node; import com.ericsson.otp.erlang.OtpErlangPid; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.List; public class ErlangProcessSnapshot { private final OtpErlangPid myPid; private final ErlangTraceElement myInit; private final String myStatus; private final String myBreakModule; private final int myBreakLine; private final String myExitReason; private final List<ErlangTraceElement> myStack; public ErlangProcessSnapshot(@NotNull OtpErlangPid pid, @NotNull ErlangTraceElement init, @NotNull String status, @Nullable String breakModule, int breakLine, @Nullable String exitReason, @NotNull List<ErlangTraceElement> stack) { myPid = pid; myInit = init; myStatus = status; myBreakModule = breakModule; myBreakLine = breakLine; myExitReason = exitReason; myStack = stack; } @NotNull public OtpErlangPid getPid() { return myPid; } @NotNull public String getPidString() { return myPid.toString(); } @NotNull public ErlangTraceElement getInit() { return myInit; } @NotNull public String getStatus() { return myStatus; } @Nullable public String getBreakModule() { return myBreakModule; } public int getBreakLine() { return myBreakLine; } @Nullable public String getExitReason() { return myExitReason; } @NotNull public List<ErlangTraceElement> getStack() { return myStack; } }
734
348
<gh_stars>100-1000 {"nom":"Saint-Mexant","circ":"1ère circonscription","dpt":"Corrèze","inscrits":1080,"abs":384,"votants":696,"blancs":21,"nuls":6,"exp":669,"res":[{"nuance":"SOC","nom":"M. <NAME>","voix":183},{"nuance":"REM","nom":"<NAME>","voix":165},{"nuance":"COM","nom":"<NAME>","voix":131},{"nuance":"FI","nom":"<NAME>","voix":72},{"nuance":"FN","nom":"Mme <NAME>","voix":42},{"nuance":"LR","nom":"Mme <NAME>","voix":32},{"nuance":"ECO","nom":"Mme <NAME>","voix":21},{"nuance":"ECO","nom":"<NAME>","voix":8},{"nuance":"EXG","nom":"Mme <NAME>","voix":6},{"nuance":"DLF","nom":"<NAME>","voix":6},{"nuance":"DIV","nom":"M. <NAME>","voix":2},{"nuance":"DIV","nom":"Mme <NAME>","voix":1}]}
278
882
<reponame>gigliovale/h2o<filename>h2o-samples/src/main/java/samples/expert/Frames.java package samples.expert; import water.Futures; import water.Job; import water.Key; import water.UKV; import water.deploy.VM; import water.fvec.*; import java.io.File; /** * Demonstration of H2O's Frame API, the distributed table-like data structure. */ public class Frames extends Job { public static void main(String[] args) throws Exception { Class job = Frames.class; samples.launchers.CloudLocal.launch(job, 1); //samples.launchers.CloudProcess.launch(job, 2); //samples.launchers.CloudConnect.launch(job, "localhost:54321"); //samples.launchers.CloudRemote.launchIPs(job, "192.168.1.161", "192.168.1.162"); //samples.launchers.CloudRemote.launchEC2(job, 4); } @Override protected void execImpl() { // From file parse(new File(VM.h2oFolder(), "smalldata/iris/iris.csv")); // Programmatically Frame frame = create( // new String[] { "A", "B" }, // new double[][] { // new double[] { 1.0, 2.0 }, // new double[] { 3.0, 4.0 } }); // Store frame in H2O's K/V store Key key = Key.make("MyFrame"); UKV.put(key, frame); } /** * Parse a dataset into a Frame. */ public static Frame parse(File file) { Key fkey = NFSFileVec.make(file); Key dest = Key.make(file.getName()); Frame frame = ParseDataset2.parse(dest, new Key[] { fkey }); return frame; } /** * Creates a frame programmatically. */ public static Frame create(String[] headers, double[][] rows) { Futures fs = new Futures(); Vec[] vecs = new Vec[rows[0].length]; Key keys[] = new Vec.VectorGroup().addVecs(vecs.length); for( int c = 0; c < vecs.length; c++ ) { AppendableVec vec = new AppendableVec(keys[c]); NewChunk chunk = new NewChunk(vec, 0); for( int r = 0; r < rows.length; r++ ) chunk.addNum(rows[r][c]); chunk.close(0, fs); vecs[c] = vec.close(fs); } fs.blockForPending(); return new Frame(headers, vecs); } }
834
992
<reponame>adinkwok/android_frameworks_support /* * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.transition; import static org.junit.Assert.assertEquals; import android.graphics.Path; import android.support.test.filters.SmallTest; import android.support.test.runner.AndroidJUnit4; import org.junit.Test; import org.junit.runner.RunWith; @SmallTest @RunWith(AndroidJUnit4.class) public class ArcMotionTest extends PathMotionTest { @Test public void test90Quadrants() { ArcMotion arcMotion = new ArcMotion(); arcMotion.setMaximumAngle(90); Path expected = arcWithPoint(0, 100, 100, 0, 100, 100); Path path = arcMotion.getPath(0, 100, 100, 0); assertPathMatches(expected, path); expected = arcWithPoint(100, 0, 0, -100, 0, 0); path = arcMotion.getPath(100, 0, 0, -100); assertPathMatches(expected, path); expected = arcWithPoint(0, -100, -100, 0, 0, 0); path = arcMotion.getPath(0, -100, -100, 0); assertPathMatches(expected, path); expected = arcWithPoint(-100, 0, 0, 100, -100, 100); path = arcMotion.getPath(-100, 0, 0, 100); assertPathMatches(expected, path); } @Test public void test345Triangles() { // 3-4-5 triangles are easy to calculate the control points ArcMotion arcMotion = new ArcMotion(); arcMotion.setMaximumAngle(90); Path expected; Path path; expected = arcWithPoint(0, 120, 160, 0, 125, 120); path = arcMotion.getPath(0, 120, 160, 0); assertPathMatches(expected, path); expected = arcWithPoint(0, 160, 120, 0, 120, 
125); path = arcMotion.getPath(0, 160, 120, 0); assertPathMatches(expected, path); expected = arcWithPoint(-120, 0, 0, 160, -120, 125); path = arcMotion.getPath(-120, 0, 0, 160); assertPathMatches(expected, path); expected = arcWithPoint(-160, 0, 0, 120, -125, 120); path = arcMotion.getPath(-160, 0, 0, 120); assertPathMatches(expected, path); expected = arcWithPoint(0, -120, -160, 0, -35, 0); path = arcMotion.getPath(0, -120, -160, 0); assertPathMatches(expected, path); expected = arcWithPoint(0, -160, -120, 0, 0, -35); path = arcMotion.getPath(0, -160, -120, 0); assertPathMatches(expected, path); expected = arcWithPoint(120, 0, 0, -160, 0, -35); path = arcMotion.getPath(120, 0, 0, -160); assertPathMatches(expected, path); expected = arcWithPoint(160, 0, 0, -120, 35, 0); path = arcMotion.getPath(160, 0, 0, -120); assertPathMatches(expected, path); } private static Path arcWithPoint(float startX, float startY, float endX, float endY, float eX, float eY) { float c1x = (eX + startX) / 2; float c1y = (eY + startY) / 2; float c2x = (eX + endX) / 2; float c2y = (eY + endY) / 2; Path path = new Path(); path.moveTo(startX, startY); path.cubicTo(c1x, c1y, c2x, c2y, endX, endY); return path; } @Test public void testMaximumAngle() { ArcMotion arcMotion = new ArcMotion(); arcMotion.setMaximumAngle(45f); assertEquals(45f, arcMotion.getMaximumAngle(), 0.0f); float ratio = (float) Math.tan(Math.PI / 8); float ex = 50 + (50 * ratio); float ey = ex; Path expected = arcWithPoint(0, 100, 100, 0, ex, ey); Path path = arcMotion.getPath(0, 100, 100, 0); assertPathMatches(expected, path); } @Test public void testMinimumHorizontalAngle() { ArcMotion arcMotion = new ArcMotion(); arcMotion.setMinimumHorizontalAngle(45); assertEquals(45, arcMotion.getMinimumHorizontalAngle(), 0.0f); float ex = 37.5f; float ey = (float) (Math.tan(Math.PI / 4) * 50); Path expected = arcWithPoint(0, 0, 100, 50, ex, ey); Path path = arcMotion.getPath(0, 0, 100, 50); assertPathMatches(expected, path); // Pretty 
much the same, but follows a different path. expected = arcWithPoint(0, 0, 100.001f, 50, ex, ey); path = arcMotion.getPath(0, 0, 100.001f, 50); assertPathMatches(expected, path); // Moving in the opposite direction. expected = arcWithPoint(100, 50, 0, 0, ex, ey); path = arcMotion.getPath(100, 50, 0, 0); assertPathMatches(expected, path); // With x < y. ex = 0; ey = (float) (Math.tan(Math.PI / 4) * 62.5f); expected = arcWithPoint(0, 0, 50, 100, ex, ey); path = arcMotion.getPath(0, 0, 50, 100); assertPathMatches(expected, path); // Pretty much the same, but follows a different path. expected = arcWithPoint(0, 0, 50, 100.001f, ex, ey); path = arcMotion.getPath(0, 0, 50, 100.001f); assertPathMatches(expected, path); // Moving in the opposite direction. expected = arcWithPoint(50, 100, 0, 0, ex, ey); path = arcMotion.getPath(50, 100, 0, 0); assertPathMatches(expected, path); } @Test public void testMinimumVerticalAngle() { ArcMotion arcMotion = new ArcMotion(); arcMotion.setMinimumVerticalAngle(45); assertEquals(45, arcMotion.getMinimumVerticalAngle(), 0.0f); float ex = 0; float ey = 62.5f; Path expected = arcWithPoint(0, 0, 50, 100, ex, ey); Path path = arcMotion.getPath(0, 0, 50, 100); assertPathMatches(expected, path); // Pretty much the same, but follows a different path. expected = arcWithPoint(0, 0, 50, 100.001f, ex, ey); path = arcMotion.getPath(0, 0, 50, 100.001f); assertPathMatches(expected, path); // Moving in opposite direction. expected = arcWithPoint(50, 100, 0, 0, ex, ey); path = arcMotion.getPath(50, 100, 0, 0); assertPathMatches(expected, path); // With x > y. ex = (float) (Math.tan(Math.PI / 4) * 37.5f); ey = 50; expected = arcWithPoint(0, 0, 100, 50, ex, ey); path = arcMotion.getPath(0, 0, 100, 50); assertPathMatches(expected, path); // Pretty much the same, but follows a different path. expected = arcWithPoint(0, 0, 100.001f, 50, ex, ey); path = arcMotion.getPath(0, 0, 100.001f, 50); assertPathMatches(expected, path); // Moving in opposite direction. 
expected = arcWithPoint(100, 50, 0, 0, ex, ey); path = arcMotion.getPath(100, 50, 0, 0); assertPathMatches(expected, path); } }
3,058
1,875
/*
 *  Copyright 2019 konsoletyper.
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.teavm.backend.lowlevel.generate;

import org.teavm.model.FieldReference;
import org.teavm.model.MethodDescriptor;
import org.teavm.model.MethodReference;
import org.teavm.model.ValueType;

/**
 * Supplies the generated symbol names used by TeaVM's low-level backends for
 * model elements (methods, fields, classes and related runtime artifacts).
 * Implementations decide how each element is mangled into a target identifier.
 */
public interface NameProvider {
    /** Returns the symbol name for the given method implementation. */
    String forMethod(MethodReference method);

    /** Returns the symbol name for the virtual-dispatch entry of the given descriptor. */
    String forVirtualMethod(MethodDescriptor method);

    /** Returns the symbol name for the given static field. */
    String forStaticField(FieldReference field);

    /** Returns the member name for the given instance field. */
    String forMemberField(FieldReference field);

    /** Returns the symbol name associated with the class itself. */
    String forClass(String className);

    /** Returns the symbol name of the class initializer for the given class. */
    String forClassInitializer(String className);

    /** Returns the symbol name of the system initializer for the given type. */
    String forClassSystemInitializer(ValueType type);

    /** Returns the symbol name of the class metadata object for the given class. */
    String forClassClass(String className);

    /** Returns the symbol name of the runtime class instance for the given type. */
    String forClassInstance(ValueType type);

    /** Returns the symbol name of the supertype-check function for the given type. */
    String forSupertypeFunction(ValueType type);
}
382
1,208
//
//  TLEmojiKeyboard+EmojiGroupControl.h
//  TLChat
//
//  Created by 李伯坤 on 16/3/17.
//  Copyright © 2016 李伯坤. All rights reserved.
//
//  Category through which TLEmojiKeyboard adopts TLEmojiGroupControlDelegate,
//  i.e. the keyboard acts as the delegate of its emoji-group control bar.
//  The delegate method implementations live in the matching .m file
//  (not visible here).
//

#import "TLEmojiKeyboard.h"

@interface TLEmojiKeyboard (EmojiGroupControl) <TLEmojiGroupControlDelegate>

@end
122
340
//==================================================================================================
/*
  EVE - Expressive Vector Engine
  Copyright : EVE Contributors & Maintainers
  SPDX-License-Identifier: MIT
*/
//==================================================================================================
#pragma once

#include <eve/detail/overload.hpp>

namespace eve
{
  //================================================================================================
  //! @addtogroup special
  //! @{
  //! @var dawson
  //!
  //! @brief Callable object computing the dawson function. \f$\displaystyle D_+(x)=e^{-x^2}\int_0^{x} e^{t^2} \mbox{d}t\f$
  //!
  //! **Required header:** `#include <eve/function/dawson.hpp>`
  //!
  //! #### Members Functions
  //!
  //! | Member       | Effect                                                     |
  //! |:-------------|:-----------------------------------------------------------|
  //! | `operator()` | the dawson operation                                       |
  //! | `operator[]` | Construct a conditional version of current function object |
  //!
  //! ---
  //!
  //! ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
  //! template< value T > auto operator()( T x) const noexcept;
  //! ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  //!
  //! **Parameters**
  //!
  //!`x`:   [value](@ref eve::value).
  //!
  //! **Return value**
  //!
  //!Returns [elementwise](@ref glossary_elementwise) the value of the dawson function.
  //!
  //! The result type is of the same type as the parameter.
  //!
  //! ---
  //!
  //! ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~{.cpp}
  //!  auto operator[]( conditional_expression auto cond ) const noexcept;
  //! ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  //!
  //!  Higher-order function generating a masked version of eve::dawson
  //!
  //!  **Parameters**
  //!
  //!  `cond` : conditional expression
  //!
  //!  **Return value**
  //!
  //!  A Callable object so that the expression `dawson[cond](x, ...)` is equivalent to `if_else(cond,dawson(x, ...),x)`
  //!
  //! ---
  //!
  //! #### Supported decorators
  //!
  //!  * eve::diff, eve::diff_1st, eve::diff_nth
  //!
  //!     **Required header:** `#include <eve/function/diff/dawson.hpp>`
  //!
  //!     The expression `diff(dawson)(x)` computes the derivative of the function at `x`.
  //!
  //! #### Example
  //!
  //! @godbolt{doc/special/dawson.cpp}
  //!
  //! @}
  //================================================================================================
  // Registers the callable object eve::dawson (tag eve::tag::dawson_); the
  // actual per-type implementation is provided by the include below.
  EVE_MAKE_CALLABLE(dawson_, dawson);
}

#include <eve/module/special/regular/impl/dawson.hpp>
888
3,710
#pragma once
#ifndef OPTIMIZE_FOR_LP64_INCLUDED
#define OPTIMIZE_FOR_LP64_INCLUDED

/* =========================================================================
 * SWAR ("SIMD within a register") helpers for LP64 targets: several small
 * independent additions / multiplications are packed into a single 64-bit
 * operation and the partial results are read back out of disjoint bit lanes.
 *
 * Observations (translated from the original Italian notes):
 *
 * 1) Two products X(13 bit)*S(8 bit) and Y(13 bit)*T(8 bit) can be computed
 *    with one 64-bit multiply by packing
 *        A = X : 0 : 00000000 : Y        B = 00000 S : 0 : 00000000 : 00000 T
 *    so that C = A * B carries U = X*S in its top 21 bits and V = Y*T in its
 *    bottom 21 bits; the middle bits are garbage and must be masked away.
 *
 * 2) Two 16x16-bit products that share one factor, X*S and Y*S, are obtained
 *    by multiplying the 48-bit packing  X : 0x0000 : Y  by S: the high
 *    32 bits of the 64-bit product are X*S, the low 32 bits are Y*S.
 *
 * 3) Likewise, four 8x8-bit products by a common factor fit in the four
 *    16-bit result lanes of one 64-bit multiply.
 * ========================================================================= */

#define OPTIMIZE_FOR_LP64

/* =========================================================================
 * Three 16-bit values are kept in 17-bit lanes so that a single carry bit
 * per lane cannot spill into the neighbouring lane during an addition.
 *
 * All macros below are fully parenthesized and do NOT end in ';' so they
 * can be used inside larger expressions (the original SECOND/THIRD
 * extraction macros carried a stray trailing semicolon, which made them
 * unusable anywhere except as a full statement).
 * ========================================================================= */
#define MASK_FIRST_OF_3_X_16BIT 0x7FFFC00000000
#define MASK_SECOND_OF_3_X_16BIT 0x3FFFE0000
#define MASK_THIRD_OF_3_X_16BIT 0x1FFFF

#define FIRST_OF_3_X_16BIT(x) ((x) >> 34)
#define SECOND_OF_3_X_16BIT(x) (((x)&MASK_SECOND_OF_3_X_16BIT) >> 17)
#define THIRD_OF_3_X_16BIT(x) ((x)&MASK_THIRD_OF_3_X_16BIT)

/* =========================================================================
 * Two 24-bit values in 25-bit lanes.
 * ========================================================================= */
#define MASK_FIRST_OF_2_X_24BIT 0x3FFFFFE000000
#define MASK_SECOND_OF_2_X_24BIT 0x1FFFFFF

#define FIRST_OF_2_X_24BIT(x) ((x) >> 25)
#define SECOND_OF_2_X_24BIT(x) ((x)&MASK_SECOND_OF_2_X_24BIT)

/* =========================================================================
 * Two 32-bit result lanes (used for the packed 16x16-bit multiply).
 * ========================================================================= */
#define MASK_FIRST_OF_2_X_32BIT 0xFFFFFFFF00000000
#define MASK_SECOND_OF_2_X_32BIT 0xFFFFFFFF

#define FIRST_OF_2_X_32BIT(x) ((x) >> 32)
#define SECOND_OF_2_X_32BIT(x) ((x)&MASK_SECOND_OF_2_X_32BIT)

/* =========================================================================
 * Fixed-width helper types. Assumes an LP64 data model, i.e.
 * unsigned long is 64 bits wide (hence the name of this header).
 * ========================================================================= */
typedef unsigned char UINT8;
typedef unsigned short UINT16;
typedef unsigned int UINT24;
typedef unsigned int UINT32;
typedef unsigned long UINT50;
typedef unsigned long UINT51;
typedef unsigned long UINT64;

/* -------------------------------------------------------------------------
 * Performs a1+b1, a2+b2, a3+b3 in a single 64-bit addition.
 * Each operand must fit in 16 bits; read the three sums back with
 * FIRST/SECOND/THIRD_OF_3_X_16BIT.
 * ------------------------------------------------------------------------- */
#define ADD_3_X_16BIT(a1, a2, a3, b1, b2, b3)                                \
  ((0UL | (UINT64)(a1) << 34 | (UINT64)(a2) << 17 | (UINT64)(a3)) +          \
   (0UL | (UINT64)(b1) << 34 | (UINT64)(b2) << 17 | (UINT64)(b3)))

inline UINT64 add_3_x_16bit(UINT16 a1, UINT16 a2, UINT16 a3,
                            UINT16 b1, UINT16 b2, UINT16 b3)
{
  return (0L | (UINT64)a1 << 34 | (UINT64)a2 << 17 | a3) +
         (0L | (UINT64)b1 << 34 | (UINT64)b2 << 17 | b3);
}

/* -------------------------------------------------------------------------
 * Performs a1+b1, a2+b2 in a single 64-bit addition (24-bit operands).
 * Read the sums back with FIRST/SECOND_OF_2_X_24BIT.
 * ------------------------------------------------------------------------- */
#define ADD_2_X_24BIT(a1, a2, b1, b2)                                        \
  ((0UL | (UINT64)(a1) << 25 | (UINT64)(a2)) +                               \
   (0UL | (UINT64)(b1) << 25 | (UINT64)(b2)))

inline UINT50 add_2_x_24bit(UINT24 a1, UINT24 a2, UINT24 b1, UINT24 b2)
{
  return (0L | (UINT64)a1 << 25 | a2) + (0L | (UINT64)b1 << 25 | b2);
}

/* -------------------------------------------------------------------------
 * Performs a1*b, a2*b in a single 64-bit multiplication.
 * Each 16x16-bit product fits in its own 32-bit lane (max 0xFFFE0001), so
 * no carry crosses lanes; read the products back with
 * FIRST/SECOND_OF_2_X_32BIT.
 * ------------------------------------------------------------------------- */
#define MULT_2_X_16BIT(a1, a2, b)                                            \
  ((UINT64)(b) * (((UINT64)(a1) << 32) | (UINT64)(a2)))

inline UINT64 mult_2_x_16bit(UINT16 a1, UINT16 a2, UINT16 b)
{
  return (0L | (UINT64)a1 << 32 | a2) * b;
}

#endif
2,358
309
#include <vtkSmartPointer.h>
#include <vtkConeSource.h>
#include <vtkTransform.h>
#include <vtkPolyData.h>
#include <vtkPolyDataMapper.h>
#include <vtkActor.h>
#include <vtkRenderWindow.h>
#include <vtkRenderer.h>
#include <vtkRenderWindowInteractor.h>

// VTK example: position an actor with vtkActor::SetUserTransform.
// Two identical cones are rendered; the second one is rotated 90 degrees
// about the Z axis by a user transform applied at render time.
int main(int, char *[])
{
  // Create a cone
  vtkSmartPointer<vtkConeSource> coneSource1 =
    vtkSmartPointer<vtkConeSource>::New();
  coneSource1->Update();

  vtkSmartPointer<vtkConeSource> coneSource2 =
    vtkSmartPointer<vtkConeSource>::New();
  coneSource2->Update();

  vtkSmartPointer<vtkPolyDataMapper> mapper1 =
    vtkSmartPointer<vtkPolyDataMapper>::New();
  mapper1->SetInputConnection(coneSource1->GetOutputPort());
  vtkSmartPointer<vtkActor> actor1 =
    vtkSmartPointer<vtkActor>::New();
  actor1->SetMapper(mapper1);

  // Create a second, transformed cone
  vtkSmartPointer<vtkPolyDataMapper> mapper2 =
    vtkSmartPointer<vtkPolyDataMapper>::New();
  mapper2->SetInputConnection(coneSource2->GetOutputPort());
  vtkSmartPointer<vtkActor> actor2 =
    vtkSmartPointer<vtkActor>::New();
  actor2->SetMapper(mapper2);

  vtkSmartPointer<vtkTransform> transform =
    vtkSmartPointer<vtkTransform>::New();
  // This is the key line: PostMultiply makes subsequent transform calls
  // concatenate in world coordinates instead of local coordinates.
  transform->PostMultiply();
  transform->RotateZ(90.0);
  // The user transform is combined with the actor's own position/orientation
  // at render time; the cone's polydata itself is not modified.
  actor2->SetUserTransform(transform);

  // Create a renderer, render window, and interactor
  vtkSmartPointer<vtkRenderer> renderer =
    vtkSmartPointer<vtkRenderer>::New();
  vtkSmartPointer<vtkRenderWindow> renderWindow =
    vtkSmartPointer<vtkRenderWindow>::New();
  renderWindow->AddRenderer(renderer);
  vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor =
    vtkSmartPointer<vtkRenderWindowInteractor>::New();
  renderWindowInteractor->SetRenderWindow(renderWindow);

  // Add the actor to the scene
  renderer->AddActor(actor1);
  renderer->AddActor(actor2);
  renderer->SetBackground(1,1,1); // Background color white

  // Render and interact
  renderWindow->Render();
  renderWindowInteractor->Start();

  return EXIT_SUCCESS;
}
774
839
from baserow.api.exceptions import UnknownFieldProvided


class UnknownFieldRaisesExceptionSerializerMixin:
    """
    Serializer mixin that rejects request payloads containing fields the
    serializer does not declare, instead of silently ignoring them (which is
    DRF's default behaviour).
    """

    def validate(self, data):
        # ``initial_data`` only exists when the serializer was fed raw input,
        # so guard for its presence before comparing key sets.
        if hasattr(self, "initial_data"):
            declared = set(self.fields.keys())
            received = set(self.initial_data.keys())
            unknown = received - declared
            if unknown:
                raise UnknownFieldProvided(
                    f"Received unknown fields: {unknown}. Please check "
                    "the api documentation and only provide "
                    "valid fields."
                )
        return data
305
2,542
// ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #include "stdafx.h" using namespace Common; using namespace HttpServer; using namespace Transport; bool HttpServerImpl::OpenLinuxAsyncServiceOperation::StartLinuxServiceOperation() { return server_->StartOpen(listenUri_, reqHandler_); } ErrorCode HttpServerImpl::OpenLinuxAsyncServiceOperation::End(__in AsyncOperationSPtr const& operation) { auto thisPtr = AsyncOperation::End<OpenLinuxAsyncServiceOperation>(operation); return thisPtr->Error; }
187
1,755
#!/usr/bin/env python
# Regression test for vtkGeometryFilter: extracts surface geometry from
# structured-grid, polydata, unstructured-grid and rectilinear-grid inputs,
# with and without extent/point/cell clipping, then checks that cell data
# is mapped to the output in the expected order (verts before lines).
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
import sys

# create pipeline - structured grid
#
pl3d = vtk.vtkMultiBlockPLOT3DReader()
pl3d.SetXYZFileName("" + str(VTK_DATA_ROOT) + "/Data/combxyz.bin")
pl3d.SetQFileName("" + str(VTK_DATA_ROOT) + "/Data/combq.bin")
pl3d.SetScalarFunctionNumber(100)
pl3d.SetVectorFunctionNumber(202)
pl3d.Update()
output = pl3d.GetOutput().GetBlock(0)
# Plain geometry extraction, no clipping.
gf = vtk.vtkGeometryFilter()
gf.SetInputData(output)
gMapper = vtk.vtkPolyDataMapper()
gMapper.SetInputConnection(gf.GetOutputPort())
gMapper.SetScalarRange(output.GetScalarRange())
gActor = vtk.vtkActor()
gActor.SetMapper(gMapper)
# Same input with extent/point/cell clipping enabled.
gf2 = vtk.vtkGeometryFilter()
gf2.SetInputData(output)
gf2.ExtentClippingOn()
gf2.SetExtent(10,17,-6,6,23,37)
gf2.PointClippingOn()
gf2.SetPointMinimum(0)
gf2.SetPointMaximum(10000)
gf2.CellClippingOn()
gf2.SetCellMinimum(0)
gf2.SetCellMaximum(7500)
g2Mapper = vtk.vtkPolyDataMapper()
g2Mapper.SetInputConnection(gf2.GetOutputPort())
g2Mapper.SetScalarRange(output.GetScalarRange())
g2Actor = vtk.vtkActor()
g2Actor.SetMapper(g2Mapper)
g2Actor.AddPosition(0,15,0)
# create pipeline - poly data
#
gf3 = vtk.vtkGeometryFilter()
gf3.SetInputConnection(gf.GetOutputPort())
g3Mapper = vtk.vtkPolyDataMapper()
g3Mapper.SetInputConnection(gf3.GetOutputPort())
g3Mapper.SetScalarRange(output.GetScalarRange())
g3Actor = vtk.vtkActor()
g3Actor.SetMapper(g3Mapper)
g3Actor.AddPosition(0,0,15)
gf4 = vtk.vtkGeometryFilter()
gf4.SetInputConnection(gf2.GetOutputPort())
gf4.ExtentClippingOn()
gf4.SetExtent(10,17,-6,6,23,37)
gf4.PointClippingOn()
gf4.SetPointMinimum(0)
gf4.SetPointMaximum(10000)
gf4.CellClippingOn()
gf4.SetCellMinimum(0)
gf4.SetCellMaximum(7500)
g4Mapper = vtk.vtkPolyDataMapper()
g4Mapper.SetInputConnection(gf4.GetOutputPort())
g4Mapper.SetScalarRange(output.GetScalarRange())
g4Actor = vtk.vtkActor()
g4Actor.SetMapper(g4Mapper)
g4Actor.AddPosition(0,15,15)
# create pipeline - unstructured grid
#
s = vtk.vtkSphere()
s.SetCenter(output.GetCenter())
s.SetRadius(100.0)
#everything
eg = vtk.vtkExtractGeometry()
eg.SetInputData(output)
eg.SetImplicitFunction(s)
gf5 = vtk.vtkGeometryFilter()
gf5.SetInputConnection(eg.GetOutputPort())
g5Mapper = vtk.vtkPolyDataMapper()
g5Mapper.SetInputConnection(gf5.GetOutputPort())
g5Mapper.SetScalarRange(output.GetScalarRange())
g5Actor = vtk.vtkActor()
g5Actor.SetMapper(g5Mapper)
g5Actor.AddPosition(0,0,30)
gf6 = vtk.vtkGeometryFilter()
gf6.SetInputConnection(eg.GetOutputPort())
gf6.ExtentClippingOn()
gf6.SetExtent(10,17,-6,6,23,37)
gf6.PointClippingOn()
gf6.SetPointMinimum(0)
gf6.SetPointMaximum(10000)
gf6.CellClippingOn()
gf6.SetCellMinimum(0)
gf6.SetCellMaximum(7500)
g6Mapper = vtk.vtkPolyDataMapper()
g6Mapper.SetInputConnection(gf6.GetOutputPort())
g6Mapper.SetScalarRange(output.GetScalarRange())
g6Actor = vtk.vtkActor()
g6Actor.SetMapper(g6Mapper)
g6Actor.AddPosition(0,15,30)
# create pipeline - rectilinear grid
#
rgridReader = vtk.vtkRectilinearGridReader()
rgridReader.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/RectGrid2.vtk")
rgridReader.Update()
gf7 = vtk.vtkGeometryFilter()
gf7.SetInputConnection(rgridReader.GetOutputPort())
g7Mapper = vtk.vtkPolyDataMapper()
g7Mapper.SetInputConnection(gf7.GetOutputPort())
g7Mapper.SetScalarRange(rgridReader.GetOutput().GetScalarRange())
g7Actor = vtk.vtkActor()
g7Actor.SetMapper(g7Mapper)
g7Actor.SetScale(3,3,3)
gf8 = vtk.vtkGeometryFilter()
gf8.SetInputConnection(rgridReader.GetOutputPort())
gf8.ExtentClippingOn()
gf8.SetExtent(0,1,-2,2,0,4)
gf8.PointClippingOn()
gf8.SetPointMinimum(0)
gf8.SetPointMaximum(10000)
gf8.CellClippingOn()
gf8.SetCellMinimum(0)
gf8.SetCellMaximum(7500)
g8Mapper = vtk.vtkPolyDataMapper()
g8Mapper.SetInputConnection(gf8.GetOutputPort())
g8Mapper.SetScalarRange(rgridReader.GetOutput().GetScalarRange())
g8Actor = vtk.vtkActor()
g8Actor.SetMapper(g8Mapper)
g8Actor.SetScale(3,3,3)
g8Actor.AddPosition(0,15,0)
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
ren1.AddActor(gActor)
ren1.AddActor(g2Actor)
ren1.AddActor(g3Actor)
ren1.AddActor(g4Actor)
ren1.AddActor(g5Actor)
ren1.AddActor(g6Actor)
ren1.AddActor(g7Actor)
ren1.AddActor(g8Actor)
renWin.SetSize(340,550)
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(84,174)
cam1.SetFocalPoint(5.22824,6.09412,35.9813)
cam1.SetPosition(100.052,62.875,102.818)
cam1.SetViewUp(-0.307455,-0.464269,0.830617)
iren.Initialize()
# prevent the tk window from showing up then start the event loop
# test that the cell data is properly mapped in the output
# Build a tiny unstructured grid of two lines and two verts with a
# per-cell integer array, run it through vtkGeometryFilter, and verify
# the array values follow the output cell order.
ug = vtk.vtkUnstructuredGrid()
p = vtk.vtkPoints()
p.InsertNextPoint(0, 0, 0)
p.InsertNextPoint(1, 0, 0)
p.InsertNextPoint(2, 0, 0)
p.InsertNextPoint(3, 0, 0)
ug.SetPoints(p)
ug.GetNumberOfPoints()
ug.Allocate(4)
lpts = [0, 1]
ug.InsertNextCell(vtk.VTK_LINE, 2, lpts)
vpts = [1]
ug.InsertNextCell(vtk.VTK_VERTEX, 1, vpts)
lpts = [2, 3]
ug.InsertNextCell(vtk.VTK_LINE, 2, lpts)
vpts = [3]
ug.InsertNextCell(vtk.VTK_VERTEX, 1, vpts)
aa = vtk.vtkIntArray()
aa.InsertNextValue(0)
aa.InsertNextValue(1)
aa.InsertNextValue(2)
aa.InsertNextValue(3)
aa.SetName('testarray')
ug.GetCellData().AddArray(aa)
gf = vtk.vtkGeometryFilter()
gf.SetInputData(ug)
gf.Update()
pd = gf.GetOutput()
oa = pd.GetCellData().GetArray('testarray')
# Check that the ordering of polydata arrays is correct. Verts should come before
# lines.
correctcelldata = [1, 3, 0, 2]
if oa.GetValue(0) != correctcelldata[0] and oa.GetValue(0) != correctcelldata[1]:
    print('Bad celldata of test array')
    sys.exit(1)
if oa.GetValue(1) != correctcelldata[0] and oa.GetValue(1) != correctcelldata[1]:
    print('Bad celldata of test array')
    sys.exit(1)
if oa.GetValue(2) != correctcelldata[2] and oa.GetValue(2) != correctcelldata[3]:
    print('Bad celldata of test array')
    sys.exit(1)
if oa.GetValue(3) != correctcelldata[2] and oa.GetValue(3) != correctcelldata[3]:
    print('Bad celldata of test array')
    sys.exit(1)
# --- end of script --
2,623
2,053
#include "mkv-reader.h"
#include "mov-format.h"
#include "mov-writer.h"
#include "webm-vpx.h"
#include "rtsp-payloads.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>

// Scratch buffer for one demuxed sample (4 MiB maximum sample size).
static uint8_t s_buffer[4 * 1024 * 1024];
// MKV track ids captured by the info callbacks; the matching MP4 track id
// returned by mov_writer_add_* is assumed to be (mkv id - 1), which the
// asserts below verify.
static int s_audio_track;
static int s_video_track;
static int s_subtitle_track;

extern "C" const struct mkv_buffer_t* mkv_file_buffer(void);
extern "C" const struct mov_buffer_t* mov_file_buffer(void);

// Formats a millisecond timestamp as "HH:MM:SS.mmm" into buf and returns buf.
// Note: truncates the 64-bit timestamp to 32 bits first.
static inline const char* ftimestamp(int64_t timestamp, char* buf)
{
    uint32_t t = (uint32_t)timestamp;
    sprintf(buf, "%02u:%02u:%02u.%03u", t / 3600000, (t / 60000) % 60, (t / 1000) % 60, t % 1000);
    return buf;
}

// Per-sample callback: logs the sample's timestamps and forwards video/audio
// samples to the MP4 writer. The static *_pts/*_dts locals remember the
// previous timestamps so the per-stream delta can be printed.
static void mkv_reader_test_onread(void* mov, uint32_t track, const void* buffer, size_t bytes, int64_t pts, int64_t dts, int flags)
{
    static char s_pts[64], s_dts[64];

    if (s_video_track == track)
    {
        static int64_t v_pts, v_dts;
        printf("[V] pts: %s, dts: %s, diff: %03d/%03d, bytes: %u%s\n", ftimestamp(pts, s_pts), ftimestamp(dts, s_dts), (int)(pts - v_pts), (int)(dts - v_dts), (unsigned int)bytes, flags ? " [I]" : "");
        v_pts = pts;
        v_dts = dts;

        // Translate the MKV keyframe flag to the MP4 writer's flag.
        mov_writer_write((mov_writer_t*)mov, s_video_track - 1, buffer, bytes, pts, dts, flags & MKV_FLAGS_KEYFRAME ? MOV_AV_FLAG_KEYFREAME : 0);
    }
    else if (s_audio_track == track)
    {
        static int64_t a_pts, a_dts;
        printf("[A] pts: %s, dts: %s, diff: %03d/%03d, bytes: %u\n", ftimestamp(pts, s_pts), ftimestamp(dts, s_dts), (int)(pts - a_pts), (int)(dts - a_dts), (unsigned int)bytes);
        a_pts = pts;
        a_dts = dts;

        mov_writer_write((mov_writer_t*)mov, s_audio_track - 1, buffer, bytes, pts, dts, 0);
    }
    else if (s_subtitle_track == track)
    {
        static int64_t t_pts, t_dts;
        // Subtitle payload text is printed (skipping a 2-byte prefix) but not
        // written to the MP4 output.
        printf("[S] pts: %s, dts: %s, diff: %03d/%03d, bytes: %u, text: %.*s\n", ftimestamp(pts, s_pts), ftimestamp(dts, s_dts), (int)(pts - t_pts), (int)(dts - t_dts), (unsigned int)bytes, (int)bytes - 2, (const char*)buffer + 2);
        t_pts = pts;
        t_dts = dts;
    }
    else
    {
        // Unknown track: log only.
        static int64_t x_pts, x_dts;
        printf("[%d] pts: %s, dts: %s, diff: %03d/%03d, bytes: %u%s\n", track, ftimestamp(pts, s_pts), ftimestamp(dts, s_dts), (int)(pts - x_pts), (int)(dts - x_dts), (unsigned int)bytes, flags ? " [I]" : "");
        x_pts = pts;
        x_dts = dts;
        //assert(0);
    }
}

// Maps an MKV codec id to the corresponding MP4 object id via the shared
// payload table; returns 0 when the codec has no MP4 mapping.
static uint8_t mkv_codec_id_to_mov_object_id(enum mkv_codec_t codec)
{
    int i = avpayload_find_by_mkv(codec);
    if (-1 == i)
        return 0;
    return s_payloads[i].mov;
}

// Track-info callback for the video track: registers the track with the MP4
// writer. VP8/VP9 tracks without extradata get a synthesized vpx
// configuration record, since MP4 requires one.
static void mkv_video_info(void* mov, uint32_t track, enum mkv_codec_t codec, int width, int height, const void* extra, size_t bytes)
{
    // TODO:
    if (bytes < 1 && (MKV_CODEC_VIDEO_VP8 == codec || MKV_CODEC_VIDEO_VP9 == codec))
    {
        int w, h;
        uint8_t buffer[128];
        struct webm_vpx_t vpx;
        memset(&vpx, 0, sizeof(vpx));
        webm_vpx_codec_configuration_record_from_vp9(&vpx, &w, &h, NULL, 0);
        bytes = webm_vpx_codec_configuration_record_save(&vpx, buffer, sizeof(buffer));
        extra = buffer; // override (consumed immediately below, so the stack
                        // buffer does not escape this function)
    }

    s_video_track = track;
    int t = mov_writer_add_video((mov_writer_t*)mov, mkv_codec_id_to_mov_object_id(codec), width, height, extra, bytes);
    assert(t == s_video_track - 1);
}

// Track-info callback for the audio track: registers it with the MP4 writer.
static void mkv_audio_info(void* mov, uint32_t track, enum mkv_codec_t codec, int channel_count, int bit_per_sample, int sample_rate, const void* extra, size_t bytes)
{
    s_audio_track = track;
    int t = mov_writer_add_audio((mov_writer_t*)mov, mkv_codec_id_to_mov_object_id(codec), channel_count, bit_per_sample, sample_rate, extra, bytes);
    assert(t == s_audio_track - 1);
}

// Track-info callback for subtitles: only remembers the track id (subtitles
// are logged, not remuxed).
static void mkv_subtitle_info(void* /*param*/, uint32_t track, enum mkv_codec_t codec, const void* /*extra*/, size_t /*bytes*/)
{
    s_subtitle_track = track;
}

// Remuxes the MKV file `src` into the MP4 file `mp4`: reads track info,
// copies every sample, then exercises mkv_reader_seek to the midpoint.
// NOTE(review): the seek happens after the read loop has drained the file,
// so it only exercises the seek API - presumably intentional for a test.
void mkv_2_mp4_test(const char* src, const char* mp4)
{
    FILE* fp = fopen(src, "rb");
    mkv_reader_t* mkv = mkv_reader_create(mkv_file_buffer(), fp);
    uint64_t duration = mkv_reader_getduration(mkv);

    FILE* wfp = fopen(mp4, "wb");
    mov_writer_t* mov = mov_writer_create(mov_file_buffer(), wfp, 0);

    struct mkv_reader_trackinfo_t info = { mkv_video_info, mkv_audio_info, mkv_subtitle_info };
    mkv_reader_getinfo(mkv, &info, mov);

    while (mkv_reader_read(mkv, s_buffer, sizeof(s_buffer), mkv_reader_test_onread, mov) > 0)
    {
    }

    duration /= 2;
    mkv_reader_seek(mkv, (int64_t*)&duration);

    mov_writer_destroy(mov);
    mkv_reader_destroy(mkv);
    fclose(fp);
    fclose(wfp);
}
1,978
1,350
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.security.models;

import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import java.time.Duration;

/** Number of device to cloud messages (MQTT protocol) is not in allowed range. */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "ruleType")
@JsonTypeName("MqttD2CMessagesNotInAllowedRange")
@Fluent
public final class MqttD2CMessagesNotInAllowedRange extends TimeWindowCustomAlertRule {
    // Emitted by the AutoRest code generator; excluded from serialization.
    @JsonIgnore private final ClientLogger logger = new ClientLogger(MqttD2CMessagesNotInAllowedRange.class);

    // The overrides below only narrow the fluent return type to this concrete
    // class so chained with*() calls keep the specific rule type.

    /** {@inheritDoc} */
    @Override
    public MqttD2CMessagesNotInAllowedRange withTimeWindowSize(Duration timeWindowSize) {
        super.withTimeWindowSize(timeWindowSize);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public MqttD2CMessagesNotInAllowedRange withMinThreshold(int minThreshold) {
        super.withMinThreshold(minThreshold);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public MqttD2CMessagesNotInAllowedRange withMaxThreshold(int maxThreshold) {
        super.withMaxThreshold(maxThreshold);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public MqttD2CMessagesNotInAllowedRange withIsEnabled(boolean isEnabled) {
        super.withIsEnabled(isEnabled);
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    @Override
    public void validate() {
        super.validate();
    }
}
731
586
# All entry-points of the Angular MDC package, in publication order.
entryPoints = [
    "base",
    "button",
    "card",
    "checkbox",
    "chips",
    "data-table",
    "dialog",
    "dom",
    "drawer",
    "elevation",
    "fab",
    "floating-label",
    "form-field",
    "line-ripple",
    "linear-progress",
    "list",
    "menu",
    "menu-surface",
    "notched-outline",
    "overlay",
    "icon",
    "icon-button",
    "image-list",
    "radio",
    "ripple",
    "select",
    "scrolling",
    "slider",
    "snackbar",
    "switch",
    "tab",
    "tab-bar",
    "tab-indicator",
    "tab-scroller",
    "textfield",
    "top-app-bar",
    "typography",
]

# List of all non-testing entry-points of the Angular MDC package.
MDC_ENTRYPOINTS = [ep for ep in entryPoints if "/testing" not in ep]

# List of all testing entry-points of the Angular MDC package.
MDC_TESTING_ENTRYPOINTS = [ep for ep in entryPoints if ep not in MDC_ENTRYPOINTS]

# List of all non-testing entry-point targets of the angular-mdc package.
MDC_TARGETS = ["//packages"] + ["//packages/%s" % ep for ep in MDC_ENTRYPOINTS]

# List of all testing entry-point targets of the Angular MDC package.
MDC_TESTING_TARGETS = ["//packages/%s" % ep for ep in MDC_TESTING_ENTRYPOINTS]
545
688
/*
 * Copyright 2017 WalmartLabs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.walmartlabs.electrode.reactnative.bridge;

import android.support.annotation.NonNull;
import android.support.annotation.Nullable;

import com.facebook.react.bridge.ReadableMap;
import com.walmartlabs.electrode.reactnative.bridge.helpers.Logger;

/**
 * A named request message traveling across the Electrode bridge. A request
 * carries optional data and a response timeout, and is created either from a
 * React Native {@link ReadableMap} (requests initiated on the JS side) or via
 * the {@link Builder} (requests initiated natively).
 */
public class ElectrodeBridgeRequest extends BridgeMessage {
    private static final String TAG = ElectrodeBridgeRequest.class.getSimpleName();

    // Default time a responder gets before the request is failed.
    private static final int DEFAULT_REQUEST_TIMEOUT_MS = 5000;

    /** Sentinel timeout value meaning the request never times out natively. */
    public static final int NO_TIMEOUT = -1;

    private final int mTimeoutMs;
    // True only for requests reconstructed from a JS-originated message map.
    private boolean isJsInitiated;

    /**
     * Creates a request from a raw bridge message map.
     *
     * @param messageMap map received from React Native
     * @return the request, or {@code null} when the map is not a valid REQUEST message
     */
    @Nullable
    public static ElectrodeBridgeRequest create(@NonNull ReadableMap messageMap) {
        ElectrodeBridgeRequest bridgeRequest = null;
        if (isValid(messageMap, BridgeMessage.Type.REQUEST)) {
            bridgeRequest = new ElectrodeBridgeRequest(messageMap);
        } else {
            Logger.w(TAG, "Unable to createMessage a bridge message, invalid data received(%s)", messageMap);
        }
        return bridgeRequest;
    }

    private ElectrodeBridgeRequest(@NonNull ReadableMap messageMap) {
        super(messageMap);
        // JS-initiated requests do not time out on the native side.
        mTimeoutMs = NO_TIMEOUT;
        isJsInitiated = true;
    }

    private ElectrodeBridgeRequest(Builder requestBuilder) {
        super(requestBuilder.mName, getUUID(), BridgeMessage.Type.REQUEST, requestBuilder.mData);
        mTimeoutMs = requestBuilder.mTimeoutMs;
        // isJsInitiated stays false: builder-created requests are native.
    }

    /**
     * @return The timeout of this request
     */
    public int getTimeoutMs() {
        return this.mTimeoutMs;
    }

    /**
     * Indicates if a request was initiated by JS.
     *
     * @return true | false
     */
    public boolean isJsInitiated() {
        return isJsInitiated;
    }

    /** Builder for natively-initiated requests. */
    public static class Builder {
        private final String mName;
        private Object mData;
        private int mTimeoutMs;

        /**
         * Initializes a new request builder
         *
         * @param name The name of the request to build
         */
        public Builder(String name) {
            mName = name;
            mTimeoutMs = DEFAULT_REQUEST_TIMEOUT_MS;
            mData = null;
        }

        /**
         * Specifies the request timeout
         *
         * @param timeoutMs The timeout in milliseconds
         * @return Current builder instance for chaining
         */
        @SuppressWarnings("unused")
        public Builder withTimeout(int timeoutMs) {
            this.mTimeoutMs = timeoutMs;
            return this;
        }

        /**
         * Specifies the request data
         *
         * @param data The data
         * @return Current builder instance for chaining
         */
        public Builder withData(Object data) {
            this.mData = data;
            return this;
        }

        /**
         * Builds the request
         *
         * @return The built request
         */
        public ElectrodeBridgeRequest build() {
            return new ElectrodeBridgeRequest(this);
        }
    }
}
1,445
601
<reponame>Harshagracy/sp-dev-fx-webparts<gh_stars>100-1000 { "$schema": "https://developer.microsoft.com/json-schemas/spfx-build/config.2.0.schema.json", "version": "2.0", "bundles": { "enhanced-list-formatting-web-part": { "components": [ { "entrypoint": "./lib/webparts/enhancedListFormatting/EnhancedListFormattingWebPart.js", "manifest": "./src/webparts/enhancedListFormatting/EnhancedListFormattingWebPart.manifest.json" } ] } }, "externals": {}, "localizedResources": { "EnhancedListFormattingWebPartStrings": "lib/webparts/enhancedListFormatting/loc/{locale}.js", "MonacoControlsLibraryStrings": "lib/controls/loc/{locale}.js", "PropertyControlStrings": "node_modules/@pnp/spfx-property-controls/lib/loc/{locale}.js" } }
343
1,162
package org.nesc.ec.bigdata.model.vo; import java.util.Objects; /** * @author lg99 */ public class TopicMetricVo { private long clusterId; private String date; private String topic; private long byteInMetric; private long byteOutMetric; private long fileSize; @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TopicMetricVo that = (TopicMetricVo) o; return clusterId == that.clusterId && date.equals(that.date) && topic.equals(that.topic); } @Override public int hashCode() { return Objects.hash(clusterId, date, topic); } public TopicMetricVo(long clusterId, String date, String topic, long byteInMetric, long byteOutMetric) { this.clusterId = clusterId; this.date = date; this.topic = topic; this.byteInMetric = byteInMetric; this.byteOutMetric = byteOutMetric; } public TopicMetricVo(long clusterId, String date, String topic, long fileSize) { this.clusterId = clusterId; this.date = date; this.topic = topic; this.fileSize = fileSize; } public long getClusterId() { return clusterId; } public void setClusterId(long clusterId) { this.clusterId = clusterId; } public String getDate() { return date; } public void setDate(String date) { this.date = date; } public String getTopic() { return topic; } public void setTopic(String topic) { this.topic = topic; } public long getByteInMetric() { return byteInMetric; } public void setByteInMetric(long byteInMetric) { this.byteInMetric = byteInMetric; } public long getByteOutMetric() { return byteOutMetric; } public void setByteOutMetric(long byteOutMetric) { this.byteOutMetric = byteOutMetric; } public long getFileSize() { return fileSize; } public void setFileSize(long fileSize) { this.fileSize = fileSize; } }
958
472
/**
 * The MIT License (MIT)
 *
 * Copyright (c) 2019-2020 Edward.Wu
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
 * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
 * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

#ifndef _SLSMapRelay_INCLUDE_
#define _SLSMapRelay_INCLUDE_

#include <map>
#include <string>

#include "SLSRelayManager.hpp"
#include "SLSLock.hpp"

// Registry that maps uplive-app/stream names to relay managers and uplive
// application names to their relay configuration. Holds a reader/writer lock
// (m_rwclock), presumably to guard both maps against concurrent access —
// confirm the locking discipline in the implementation.
class CSLSMapRelay
{
public:
    CSLSMapRelay();
    virtual ~CSLSMapRelay();

    // Returns the relay manager for the given uplive app / stream pair.
    // NOTE(review): the pointer appears to be owned by this map (see clear());
    // confirm creation/ownership semantics in the .cpp.
    CSLSRelayManager *add_relay_manager(const char *app_uplive, const char *stream_name);

    // Drops all registered relay managers and relay configuration entries.
    void clear();

    // Registers relay configuration for an uplive application name.
    int add_relay_conf(std::string app_uplive, sls_conf_relay_t * cr);

    // Looks up the relay configuration for an uplive application name.
    SLS_RELAY_INFO *get_relay_conf(std::string app_uplive);

private:
    CSLSRWLock m_rwclock;

    std::map<std::string, CSLSRelayManager *> m_map_relay_manager; //stream_name: relay_manager
    std::map<std::string, SLS_RELAY_INFO *> m_map_relay_info; //uplive: relay_conf_info
};

#endif
647
381
package com.grouter.demo.other.service;

import android.hardware.SensorEvent;

import com.grouter.GRouterTask;
import com.grouter.RouterField;
import com.grouter.RouterTask;

import java.util.Map; // NOTE(review): unused import — left in place, may be referenced by generated code; confirm before removing.

/**
 * Router task registered as "UserLogin", declared to return a "User".
 * The annotated fields are injected by the GRouter framework — presumably
 * from the route parameters; confirm against the GRouter documentation.
 */
@RouterTask(value = "UserLogin",returns = "User")
public class UserLoginTask extends GRouterTask {
    // Injected user id route parameter.
    @RouterField
    public int uid;
    // Injected password route parameter.
    @RouterField
    public String pwd;
    // Injected sensor event payload.
    @RouterField
    public SensorEvent sensorEvent;

    /**
     * Task body invoked by the router; this demo implementation does nothing
     * and returns null.
     */
    @Override
    protected Object process() throws Exception {
        return null;
    }
}
232
1,755
<gh_stars>1000+ #include "vtkHoverWidget.h" #include <cstdlib> #include <iostream> #include "WidgetTestingMacros.h" int vtkHoverWidgetTest1(int, char*[]) { vtkSmartPointer<vtkHoverWidget> node1 = vtkSmartPointer<vtkHoverWidget>::New(); EXERCISE_BASIC_HOVER_METHODS(node1); return EXIT_SUCCESS; }
131
1,338
/*
 * PackBits.h
 * Copyright 1999-2000 Y.Takagi. All Rights Reserved.
 */

#ifndef __PACKBITS_H
#define __PACKBITS_H

/*
 * Returns the number of bytes the compressed form of `source` (length `size`)
 * will occupy — presumably PackBits run-length encoding as used by TIFF and
 * printer rasters; confirm against the implementation.
 */
int pack_bits_size(const unsigned char* source, int size);

/*
 * Compresses `size` bytes from `source` into `destination` and returns the
 * number of bytes written. `destination` must be at least
 * pack_bits_size(source, size) bytes.
 */
int pack_bits(unsigned char* destination, const unsigned char* source, int size);

#endif /* __PACKBITS_H */
106
14,668
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef ASH_QUICK_PAIR_FEATURE_STATUS_TRACKER_FAST_PAIR_ENABLED_PROVIDER_H_
#define ASH_QUICK_PAIR_FEATURE_STATUS_TRACKER_FAST_PAIR_ENABLED_PROVIDER_H_

#include "ash/quick_pair/feature_status_tracker/base_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/bluetooth_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/fast_pair_pref_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/google_api_key_availability_provider.h"
#include "ash/quick_pair/feature_status_tracker/logged_in_user_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/screen_state_enabled_provider.h"
#include "base/memory/weak_ptr.h"

namespace ash {
namespace quick_pair {

// Exposes an |is_enabled()| method and callback to query and observe when the
// Fast Pair feature is enabled/disabled.
class FastPairEnabledProvider : public BaseEnabledProvider {
 public:
  // Takes ownership of the five sub-providers (bluetooth state, Fast Pair
  // pref, logged-in user, screen state, Google API key availability).
  // Presumably the feature is enabled only when the sub-providers agree —
  // see AreSubProvidersEnabled() in the .cc for the exact combination.
  explicit FastPairEnabledProvider(
      std::unique_ptr<BluetoothEnabledProvider> bluetooth_enabled_provider,
      std::unique_ptr<FastPairPrefEnabledProvider>
          fast_pair_pref_enabled_provider,
      std::unique_ptr<LoggedInUserEnabledProvider>
          logged_in_user_enabled_provider,
      std::unique_ptr<ScreenStateEnabledProvider> screen_state_enabled_provider,
      std::unique_ptr<GoogleApiKeyAvailabilityProvider>
          google_api_key_availability_provider);
  ~FastPairEnabledProvider() override;

 private:
  // Recomputes the combined enabled state from the sub-providers.
  bool AreSubProvidersEnabled();
  // Invoked when any sub-provider flips state.
  void OnSubProviderEnabledChanged(bool);

  std::unique_ptr<BluetoothEnabledProvider> bluetooth_enabled_provider_;
  std::unique_ptr<FastPairPrefEnabledProvider> fast_pair_pref_enabled_provider_;
  std::unique_ptr<LoggedInUserEnabledProvider> logged_in_user_enabled_provider_;
  std::unique_ptr<ScreenStateEnabledProvider> screen_state_enabled_provider_;
  std::unique_ptr<GoogleApiKeyAvailabilityProvider>
      google_api_key_availability_provider_;

  base::WeakPtrFactory<FastPairEnabledProvider> weak_factory_{this};
};

}  // namespace quick_pair
}  // namespace ash

#endif  // ASH_QUICK_PAIR_FEATURE_STATUS_TRACKER_FAST_PAIR_ENABLED_PROVIDER_H_
793
1,178
// Copyright 2020 Makani Technologies LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include <stdint.h> #include "lib/bazel/swig_test/fruit.h" #include "lib/bazel/swig_test/utils.h" double Scale(uint16_t raw, double factor) { return static_cast<double>(raw) * factor; } Produce WholesalePrice(double price, uint16_t count) { Produce produce; produce.fruit = kFruitBanana; produce.price = price; produce.count = count; produce.total = price * count; return produce; } void TotalPrice(int32_t count, const double *price, double *total) { *total = static_cast<double>(count) * (*price); }
339
360
""" Developed by <NAME> in pybullet-planning repository (https://github.com/caelan/pybullet-planning) and adapted by iGibson team. """ from random import shuffle from itertools import islice import time INF = float('inf') RRT_ITERATIONS = 20 RRT_RESTARTS = 2 RRT_SMOOTHING = 20 def irange(start, stop=None, step=1): # np.arange if stop is None: stop = start start = 0 while start < stop: yield start start += step def argmin(function, sequence): values = list(sequence) scores = [function(x) for x in values] return values[scores.index(min(scores))] def pairs(lst): return zip(lst[:-1], lst[1:]) def merge_dicts(*args): result = {} for d in args: result.update(d) return result # return dict(reduce(operator.add, [d.items() for d in args])) def flatten(iterable_of_iterables): return (item for iterables in iterable_of_iterables for item in iterables) def randomize(sequence): shuffle(sequence) return sequence def take(iterable, n=INF): if n == INF: n = None # NOTE - islice takes None instead of INF elif n == None: n = 0 # NOTE - for some of the uses return islice(iterable, n) def enum(*sequential, **named): enums = dict(zip(sequential, range(len(sequential))), **named) enums['names'] = sorted(enums.keys(), key=lambda k: enums[k]) return type('Enum', (), enums) def elapsed_time(start_time): return time.time() - start_time
589
4,901
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.j2objc.nio.charset;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;
import junit.framework.TestCase;
import org.junit.Assert;

/**
 * Test encoding and decoding of J2ObjC's charset implementations.
 *
 * @author <NAME>
 */
public class CharsetTest extends TestCase {

  /**
   * Decodes {@code bytes} with the named charset through four API paths
   * (explicit CharsetDecoder, Charset.decode, String(byte[], String) and
   * String(byte[], Charset)) and checks every result equals {@code expected}.
   * Malformed/unmappable input is replaced (typically with U+FFFD) rather
   * than rejected.
   */
  private void assertCorrectDecoding(String expected, byte[] bytes, String charsetName)
      throws IOException {
    Charset cs = Charset.forName(charsetName);
    CharsetDecoder decoder = cs.newDecoder()
        .onMalformedInput(CodingErrorAction.REPLACE)
        .onUnmappableCharacter(CodingErrorAction.REPLACE);
    assertEquals(expected, decoder.decode(ByteBuffer.wrap(bytes)).toString());
    assertEquals(expected, cs.decode(ByteBuffer.wrap(bytes)).toString());
    assertEquals(expected, new String(bytes, charsetName));
    assertEquals(expected, new String(bytes, cs));
  }

  /** Exercises decoding across UTF, ISO-8859, Windows code pages and CJK charsets. */
  public void testDecoding() throws IOException {
    // UTF-8 with some invalid bytes.
    byte[] invalidUtf8 = {
      91, 92, -1, -40, -1, -32, 1, 16, 74, 0, 70, -27, -101, 73, 70, -28, -72, -83, -27, -101 };
    assertCorrectDecoding(
        "[\\\ufffd\ufffd\ufffd\ufffd\u0001\u0010J\0F\ufffdIF中\ufffd", invalidUtf8, "UTF-8");

    // UTF-16 with different byte order marks.
    assertCorrectDecoding("abc", new byte[] { -2, -1, 0, 97, 0, 98, 0, 99 }, "UTF-16");
    assertCorrectDecoding("abc", new byte[] { -1, -2, 97, 0, 98, 0, 99, 0 }, "UTF-16");
    assertCorrectDecoding("abc", new byte[] { 0, 97, 0, 98, 0, 99 }, "UTF-16");

    // UTF-16 with explicit endianness.
    // With an explicit endianness a BOM is not consumed — it decodes as U+FEFF.
    assertCorrectDecoding("abc", new byte[] { 0, 97, 0, 98, 0, 99 }, "UTF-16BE");
    assertCorrectDecoding("\ufeffabc", new byte[] { -2, -1, 0, 97, 0, 98, 0, 99 }, "UTF-16BE");
    assertCorrectDecoding("abc", new byte[] { 97, 0, 98, 0, 99, 0 }, "UTF-16LE");
    assertCorrectDecoding("\ufeffabc", new byte[] { -1, -2, 97, 0, 98, 0, 99, 0 }, "UTF-16LE");

    // UTF-32
    assertCorrectDecoding("abc", new byte[] { 0, 0, 0, 97, 0, 0, 0, 98, 0, 0, 0, 99 }, "UTF-32");
    assertCorrectDecoding("abc", new byte[] { 0, 0, 0, 97, 0, 0, 0, 98, 0, 0, 0, 99 }, "UTF-32BE");
    assertCorrectDecoding("abc", new byte[] { 97, 0, 0, 0, 98, 0, 0, 0, 99, 0, 0, 0 }, "UTF-32LE");

    // Other encodings
    assertCorrectDecoding("abc", new byte[] { 97, 98, 99 }, "US-ASCII");
    assertCorrectDecoding("abc", new byte[] { 97, 98, 99 }, "ISO-8859-1");
    assertCorrectDecoding("abc", new byte[] { 97, 98, 99 }, "ISO-8859-2");
    assertCorrectDecoding("日本", new byte[] { -58, -4, -53, -36 }, "EUC-JP");
    assertCorrectDecoding("日本", new byte[] { -109, -6, -106, 123 }, "SHIFT_JIS");
    assertCorrectDecoding(
        "日本", new byte[] { 27, 36, 66, 70, 124, 75, 92, 27, 40, 66 }, "ISO-2022-JP");
    assertCorrectDecoding("öߍ", new byte[] { -10, -33, -115 }, "WINDOWS-1250");
    assertCorrectDecoding("фЭЖ", new byte[] { -12, -35, -58 }, "WINDOWS-1251");
    assertCorrectDecoding("žºé", new byte[] { -98, -70, -23 }, "WINDOWS-1252");
    assertCorrectDecoding("ΔΣΨ", new byte[] { -60, -45, -40 }, "WINDOWS-1253");
    assertCorrectDecoding("Ğ¿ÿ", new byte[] { -48, -65, -1 }, "WINDOWS-1254");
    assertCorrectDecoding("√ˇà", new byte[] { -61, -1, -120 }, "X-MACROMAN");
    assertCorrectDecoding("觥秤", new byte[] { -10, -95, -77, -45 }, "GB2312");
    assertCorrectDecoding("구분", new byte[] { -79, -72, -70, -48 }, "EUC-KR");
    assertCorrectDecoding("侖侶", new byte[] { -127, -10, -126, 72 }, "gbk");
    assertCorrectDecoding("侖侶", new byte[] { -127, -10, -126, 72 }, "gb18030");
    assertCorrectDecoding("高雄市", new byte[] { -80, -86, -74, -81, -91, -85 }, "Big5");
    assertCorrectDecoding("高雄市", new byte[] { -80, -86, -74, -81, -91, -85 }, "Big5-HKSCS");
  }

  /**
   * Encodes {@code input} with the named charset through four API paths
   * (explicit CharsetEncoder, Charset.encode, String.getBytes(String) and
   * String.getBytes(Charset)) and checks each byte result equals
   * {@code expected}. Malformed/unmappable characters are replaced.
   */
  private void assertCorrectEncoding(byte[] expected, String input, String charsetName)
      throws IOException {
    Charset cs = Charset.forName(charsetName);
    CharsetEncoder encoder = cs.newEncoder()
        .onMalformedInput(CodingErrorAction.REPLACE)
        .onUnmappableCharacter(CodingErrorAction.REPLACE);
    ByteBuffer bb = encoder.encode(CharBuffer.wrap(input.toCharArray()));
    byte[] result = new byte[bb.remaining()];
    bb.get(result);
    Assert.assertArrayEquals(expected, result);
    bb = cs.encode(CharBuffer.wrap(input.toCharArray()));
    result = new byte[bb.remaining()];
    bb.get(result);
    Assert.assertArrayEquals(expected, result);
    Assert.assertArrayEquals(expected, input.getBytes(charsetName));
    Assert.assertArrayEquals(expected, input.getBytes(cs));
  }

  /** Mirror of testDecoding for the encoding direction. */
  public void testEncoding() throws IOException {
    // UTF-16
    assertCorrectEncoding(new byte[] { -2, -1, 0, 97, 0, 98, 0, 99 }, "abc", "UTF-16");
    assertCorrectEncoding(new byte[] { 0, 97, 0, 98, 0, 99 }, "abc", "UTF-16BE");
    assertCorrectEncoding(new byte[] { 97, 0, 98, 0, 99, 0 }, "abc", "UTF-16LE");

    // UTF-32
    assertCorrectEncoding(new byte[] { 0, 0, 0, 97, 0, 0, 0, 98, 0, 0, 0, 99 }, "abc", "UTF-32");
    assertCorrectEncoding(new byte[] { 0, 0, 0, 97, 0, 0, 0, 98, 0, 0, 0, 99 }, "abc", "UTF-32BE");
    assertCorrectEncoding(new byte[] { 97, 0, 0, 0, 98, 0, 0, 0, 99, 0, 0, 0 }, "abc", "UTF-32LE");

    // Other encodings
    assertCorrectEncoding(new byte[] { 97, 98, 99 }, "abc", "US-ASCII");
    assertCorrectEncoding(new byte[] { 97, 98, 99 }, "abc", "ISO-8859-1");
    assertCorrectEncoding(new byte[] { 97, 98, 99 }, "abc", "ISO-8859-2");
    assertCorrectEncoding(new byte[] { -58, -4, -53, -36 }, "日本", "EUC-JP");
    assertCorrectEncoding(new byte[] { -109, -6, -106, 123 }, "日本", "SHIFT_JIS");
    assertCorrectEncoding(
        new byte[] { 27, 36, 66, 70, 124, 75, 92, 27, 40, 66 }, "日本", "ISO-2022-JP");
    assertCorrectEncoding(new byte[] { -10, -33, -115 }, "öߍ", "WINDOWS-1250");
    assertCorrectEncoding(new byte[] { -12, -35, -58 }, "фЭЖ", "WINDOWS-1251");
    assertCorrectEncoding(new byte[] { -98, -70, -23 }, "žºé", "WINDOWS-1252");
    assertCorrectEncoding(new byte[] { -60, -45, -40 }, "ΔΣΨ", "WINDOWS-1253");
    assertCorrectEncoding(new byte[] { -48, -65, -1 }, "Ğ¿ÿ", "WINDOWS-1254");
    assertCorrectEncoding(new byte[] { -61, -1, -120 }, "√ˇà", "X-MACROMAN");
    assertCorrectEncoding(new byte[] { -10, -95, -77, -45 }, "觥秤", "GB2312");
    assertCorrectEncoding(new byte[] { -79, -72, -70, -48 }, "구분", "EUC-KR");
    assertCorrectEncoding(new byte[] { -127, -10, -126, 72 }, "侖侶", "gbk");
    assertCorrectEncoding(new byte[] { -127, -10, -126, 72 }, "侖侶", "gb18030");
    assertCorrectEncoding(new byte[] { -80, -86, -74, -81, -91, -85 }, "高雄市", "Big5");
    assertCorrectEncoding(new byte[] { -80, -86, -74, -81, -91, -85 }, "高雄市", "Big5-HKSCS");

    // Unmappable character
    // U+D7C5 is an unpaired/unsupported char here, so it encodes to '?' (63).
    assertCorrectEncoding(new byte[] { 97, 98, 63, 99, 100 }, "ab\uD7C5cd", "ISO-8859-1");
  }
}
3,078
8,805
/*!
@file
Forward declares `boost::hana::default_` and `boost::hana::is_default`.

@copyright <NAME> 2013-2016
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt)
 */

#ifndef BOOST_HANA_FWD_CORE_DEFAULT_HPP
#define BOOST_HANA_FWD_CORE_DEFAULT_HPP

#include <boost/hana/config.hpp>


BOOST_HANA_NAMESPACE_BEGIN
    //! @ingroup group-core
    //! Mark a tag-dispatched method implementation as a default
    //! implementation.
    //!
    //! When defining a new concept with tag-dispatched methods, it is
    //! sometimes possible to provide a default implementation for some
    //! method(s). Making `default_` a base class of such a default
    //! implementation makes it possible to detect whether the method
    //! was dispatched to the default implementation afterwards.
    //!
    //!
    //! Example
    //! -------
    //! @include example/core/default.cpp
    struct default_ { };

    //! @ingroup group-core
    //! Returns whether a tag-dispatched method implementation is a default
    //! implementation.
    //!
    //! Given a tag-dispatched method implementation `method_impl<T...>`,
    //! `is_default<method_impl<T...>>` returns whether `method_impl<T...>`
    //! is a default implementation. Note that if there is no default
    //! implementation for the method, then `is_default` should not be
    //! used unless a static assertion saying that "the method is not
    //! implemented" is acceptable.
    //!
    //!
    //! Example
    //! -------
    //! @include example/core/default.cpp
#ifdef BOOST_HANA_DOXYGEN_INVOKED
    template <typename Method>
    struct is_default { see documentation };
#else
    // Primary template is only declared here; the definition is presumably
    // provided in the corresponding non-forward header
    // (boost/hana/core/default.hpp) — this file exists purely to forward
    // declare.
    template <typename T, typename = void>
    struct is_default;
#endif
BOOST_HANA_NAMESPACE_END

#endif // !BOOST_HANA_FWD_CORE_DEFAULT_HPP
711
453
# coding: utf-8

"""Helpers to get basic resources (covers, fonts, …)"""

import glob
import os
import string

import scaldoc.paths as paths


def get_cover(name):
    """Return the absolute path to the specified cover (RING, S3C, …)."""
    filename = '{0}_cover.png'.format(name)
    return os.path.join(paths.SHARED_STATIC, filename)


def get_footer_logo():
    """Return the absolute path to the footer logo."""
    return os.path.join(paths.SHARED_STATIC, 'footer_logo.png')


def get_fonts():
    """Return a list of paths to font files to load."""
    pattern = os.path.join(paths.SHARED_FONTS, '*', '*.ttf')
    return glob.glob(pattern)


def get_latex_preamble(cover, logo, title, title_voffset, version, copyright):
    """Return the content of the LaTeX preamble.

    Args:
        cover (str): path to the cover image
        logo (str): path to the logo image
        title (str): document title
        title_voffset (str): vertical offset of the title on the cover page
        version (str): document version
        copyright (str): document copyright
    """
    template_path = os.path.join(paths.SHARED_TEMPLATES, 'preamble.tex')
    with open(template_path, 'r') as fp:
        preamble = string.Template(fp.read())
    return preamble.substitute(
        cover=cover,
        logo=logo,
        title=title,
        title_voffset=title_voffset,
        version=version,
        copyright=copyright,
    )
593
3,345
#ifndef __EEPROM_H
#define __EEPROM_H

/*
  Author:     <NAME>
  WebSite:    http://www.github.com/NimaLTD
  Instagram:  http://instagram.com/github.NimaLTD
  Youtube:    https://www.youtube.com/channel/UCUhY7qY1klJm1d2kulr9ckw

  Version:    2.0.1

  (2.0.1)
  Change function name to ee_commit().

  Reversion History:
  (2.0.0)
  Rewrite again.
*/

#ifdef __cplusplus
extern "C" {
#endif

// #include <stdbool.h>
#include <stdint.h>
//#include "gpio.h"

//################################################################################################################
/* Erases/initializes the emulated-EEPROM storage area. Return value is
   presumably nonzero on success — confirm in the implementation. */
uint8_t ee_format(void);

/* Reads `len` bytes starting at `startVirtualAddress` into `data`. */
uint8_t ee_read(uint32_t startVirtualAddress, uint32_t len, uint8_t* data);

/* Writes `len` bytes from `data` starting at `startVirtualAddress`. */
uint8_t ee_write(uint32_t startVirtualAddress, uint32_t len, uint8_t* data);

/* Returns the highest virtual address available (i.e. the usable capacity). */
uint32_t ee_maxVirtualAddress(void);
//################################################################################################################

#ifdef __cplusplus
}
#endif

#endif
359
1,027
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alipay.remoting.config; /** * The base implementation class of the configuration item. * * @author chengyi (<EMAIL>) 2018-11-06 17:25 */ public class BoltOption<T> { private final String name; private T defaultValue; protected BoltOption(String name, T defaultValue) { this.name = name; this.defaultValue = defaultValue; } public String name() { return name; } public T defaultValue() { return defaultValue; } public static <T> BoltOption<T> valueOf(String name) { return new BoltOption<T>(name, null); } public static <T> BoltOption<T> valueOf(String name, T defaultValue) { return new BoltOption<T>(name, defaultValue); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } BoltOption<?> that = (BoltOption<?>) o; return name != null ? name.equals(that.name) : that.name == null; } @Override public int hashCode() { return name != null ? name.hashCode() : 0; } }
683
599
package com.jlmd.android.newfilmsmvp.mvp.view;

import com.jlmd.android.newfilmsmvp.domain.model.MovieDetails;

/**
 * MVP view contract for the movie details screen: the presenter drives the
 * loading indicator and pushes the loaded details to the view.
 *
 * @author jlmd
 */
public interface MovieDetailsView extends View {

  /** Shows a progress indicator while the movie details are being fetched. */
  void showLoading();

  /** Hides the progress indicator. */
  void hideLoading();

  /** Renders the given movie details. */
  void renderMovie(final MovieDetails movieDetails);
}
99
595
from django.http import HttpResponseRedirect
from django.template.response import TemplateResponse
from django.contrib.auth import authenticate, login as auth_login, logout as auth_logout

from rest_framework.permissions import BasePermission, SAFE_METHODS


def login(request):
    """Render the login form, remembering where to redirect after success."""
    redirect_to = request.GET.get("next", "/dashboard/")
    return TemplateResponse(request, 'auth/login.html', {"redirect_to": redirect_to})


def do(request):
    """Handle the login form POST.

    Authenticates the submitted credentials and redirects to the requested
    page on success; failed or inactive logins go back to the login page.
    """
    username = request.POST['username']
    password = request.POST['password']
    redirect_to = request.POST['redirect_to']
    user = authenticate(username=username, password=password)
    if user is not None and user.is_active:
        auth_login(request, user)
        return HttpResponseRedirect(redirect_to)
    # Bug fix: the original returned None (a 500 in Django) when
    # authentication failed; always send the user back to the login page.
    return HttpResponseRedirect('/login/')


def logout(request):
    """Log the current user out and return to the login page."""
    auth_logout(request)
    return HttpResponseRedirect('/login/')


class IsAuthenticatedOrCreateOnly(BasePermission):
    """
    Allow unauthenticated requests only for unsafe (create/write) methods;
    safe (read-only) methods require an authenticated user.
    """

    def has_permission(self, request, view):
        # NOTE(review): `is_authenticated()` is callable only on Django < 1.10
        # (it became a property afterwards) — confirm the project's Django
        # version before modernizing this call.
        return (
            request.method not in SAFE_METHODS or
            request.user and
            request.user.is_authenticated()
        )
456
435
<gh_stars>100-1000 { "description": "Large organizations have traditionally used languages such as C ++, .NET\nand Java development for enterprise applications. \"Scripting\" languages\nlike Python were considered risky. Not sufficiently robust with the lack\nof the tools around them (framework). Times have, however, changed and\nthe Python language has become surrounded by the huge open source\ncommunity that takes care of the maintenance of the ecosystem. We see\nincreasing use of dynamic languages for web applications development.\nBut what is meant by productive enterprise or web application? These\napplications should meet several requirements. Scalability, zero down\ntime deployments, short release cycles and many other properties\n", "duration": 1835, "language": "slk", "recorded": "2016-03-11", "speakers": [ "<NAME>" ], "thumbnail_url": "https://i.ytimg.com/vi/snUGyOTn26g/hqdefault.jpg", "title": "Virtualenv, Flask, Vagrant, etc.", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=snUGyOTn26g" } ] }
318
312
# !/usr/bin/env/python3 # Copyright (c) Facebook, Inc. and its affiliates. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """AG news Classification script.""" import os from argparse import ArgumentParser import pandas as pd import pytorch_lightning as pl import requests import torch import torch.nn.functional as F from pytorch_lightning import seed_everything from pytorch_lightning.callbacks import ( EarlyStopping, ModelCheckpoint, LearningRateMonitor, ) from pytorch_lightning.accelerators import ( CPUAccelerator, ) from pytorch_lightning.plugins import ( NativeMixedPrecisionPlugin, DDPPlugin, ) from pytorch_lightning.metrics import Accuracy from sklearn.model_selection import ( train_test_split, ) from torch import nn from torch.utils.data import Dataset, DataLoader import torchtext.datasets as td from transformers import ( BertModel, BertTokenizer, AdamW, ) class AGNewsDataset(Dataset): def __init__( self, reviews, targets, tokenizer, max_length, ): """ Performs initialization of tokenizer :param reviews: AG news text :param targets: labels :param tokenizer: bert tokenizer :param max_length: maximum length of the news text """ self.reviews = reviews self.targets = targets self.tokenizer = tokenizer self.max_length = max_length def __len__(self): """ :return: returns the number of datapoints in the dataframe """ return len(self.reviews) def __getitem__(self, item): """ Returns the review text and the targets of the specified item :param item: Index of sample review :return: 
Returns the dictionary of review text, input ids, attention mask, targets """ review = str(self.reviews[item]) target = self.targets[item] encoding = self.tokenizer.encode_plus( review, add_special_tokens=True, max_length=self.max_length, return_token_type_ids=False, padding="max_length", return_attention_mask=True, return_tensors="pt", truncation=True, ) return { "review_text": review, "input_ids": encoding[ "input_ids" ].flatten(), "attention_mask": encoding[ "attention_mask" ].flatten(), "targets": torch.tensor( target, dtype=torch.long ), } class BertDataModule(pl.LightningDataModule): def __init__(self, **kwargs): """ Initialization of inherited lightning data module """ super(BertDataModule, self).__init__() self.PRE_TRAINED_MODEL_NAME = ( "bert-base-uncased" ) self.df_train = None self.df_val = None self.df_test = None self.train_data_loader = None self.val_data_loader = None self.test_data_loader = None self.MAX_LEN = 100 self.encoding = None self.tokenizer = None self.args = kwargs self.NUM_SAMPLES_COUNT = self.args[ "num_samples" ] self.VOCAB_FILE_URL = self.args[ "vocab_file" ] self.VOCAB_FILE = ( "bert_base_uncased_vocab.txt" ) @staticmethod def process_label(rating): rating = int(rating) return rating - 1 def prepare_data(self): """ Implementation of abstract class """ def setup(self, stage=None): """ Downloads the data, parse it and split the data into train, test, validation data :param stage: Stage - training or testing """ # reading the input td.AG_NEWS( root="data", split=("train", "test") ) extracted_files = os.listdir("data") train_csv_path = None for fname in extracted_files: if fname.endswith("train.csv"): train_csv_path = os.path.join( os.getcwd(), "data", fname ) df = pd.read_csv(train_csv_path) df.columns = [ "label", "title", "description", ] df.sample(frac=1) df = df.iloc[: self.NUM_SAMPLES_COUNT] df["label"] = df.label.apply( self.process_label ) if not os.path.isfile(self.VOCAB_FILE): filePointer = requests.get( self.VOCAB_FILE_URL, 
allow_redirects=True, ) if filePointer.ok: with open( self.VOCAB_FILE, "wb" ) as f: f.write(filePointer.content) else: raise RuntimeError( "Error in fetching the vocab file" ) self.tokenizer = BertTokenizer( self.VOCAB_FILE ) RANDOM_SEED = 42 seed_everything(RANDOM_SEED) df_train, df_test = train_test_split( df, test_size=0.2, random_state=RANDOM_SEED, stratify=df["label"], ) df_train, df_val = train_test_split( df_train, test_size=0.25, random_state=RANDOM_SEED, stratify=df_train["label"], ) self.df_train = df_train self.df_test = df_test self.df_val = df_val @staticmethod def add_model_specific_args(parent_parser): """ Returns the review text and the targets of the specified item :param parent_parser: Application specific parser :return: Returns the augmented arugument parser """ parser = ArgumentParser( parents=[parent_parser], add_help=False, ) parser.add_argument( "--batch-size", type=int, default=16, metavar="N", help="input batch size for training (default: 16)", ) parser.add_argument( "--num-workers", type=int, default=3, metavar="N", help="number of workers (default: 0)", ) return parser def create_data_loader( self, df, tokenizer, max_len, batch_size ): """ Generic data loader function :param df: Input dataframe :param tokenizer: bert tokenizer :param max_len: Max length of the news datapoint :param batch_size: Batch size for training :return: Returns the constructed dataloader """ ds = AGNewsDataset( reviews=df.description.to_numpy(), targets=df.label.to_numpy(), tokenizer=tokenizer, max_length=max_len, ) return DataLoader( ds, batch_size=self.args["batch_size"], num_workers=self.args["num_workers"], ) def train_dataloader(self): """ :return: output - Train data loader for the given input """ self.train_data_loader = self.create_data_loader( self.df_train, self.tokenizer, self.MAX_LEN, self.args["batch_size"], ) return self.train_data_loader def val_dataloader(self): """ :return: output - Validation data loader for the given input """ 
self.val_data_loader = self.create_data_loader( self.df_val, self.tokenizer, self.MAX_LEN, self.args["batch_size"], ) return self.val_data_loader def test_dataloader(self): """ :return: output - Test data loader for the given input """ self.test_data_loader = self.create_data_loader( self.df_test, self.tokenizer, self.MAX_LEN, self.args["batch_size"], ) return self.test_data_loader class BertNewsClassifier(pl.LightningModule): def __init__(self, **kwargs): """ Initializes the network, optimizer and scheduler """ super(BertNewsClassifier, self).__init__() self.train_acc = Accuracy() self.val_acc = Accuracy() self.test_acc = Accuracy() self.PRE_TRAINED_MODEL_NAME = ( "bert-base-uncased" ) self.bert_model = BertModel.from_pretrained( self.PRE_TRAINED_MODEL_NAME ) for param in self.bert_model.parameters(): param.requires_grad = False self.drop = nn.Dropout(p=0.2) # assigning labels self.class_names = [ "world", "Sports", "Business", "Sci/Tech", ] n_classes = len(self.class_names) self.fc1 = nn.Linear( self.bert_model.config.hidden_size, 512, ) self.out = nn.Linear(512, n_classes) self.args = kwargs def forward(self, input_ids, attention_mask): """ :param input_ids: Input data :param attention_maks: Attention mask value :return: output - Type of news for the given news snippet """ pooled_output = self.bert_model( input_ids=input_ids, attention_mask=attention_mask, ).pooler_output output = F.relu(self.fc1(pooled_output)) output = self.drop(output) output = self.out(output) return output @staticmethod def add_model_specific_args(parent_parser): """ Returns the review text and the targets of the specified item :param parent_parser: Application specific parser :return: Returns the augmented arugument parser """ parser = ArgumentParser( parents=[parent_parser], add_help=False, ) parser.add_argument( "--lr", type=float, default=0.001, metavar="LR", help="learning rate (default: 0.001)", ) return parser def training_step( self, train_batch, batch_idx ): """ Training the data 
as batches and returns training loss on each batch :param train_batch Batch data :param batch_idx: Batch indices :return: output - Training loss """ input_ids = train_batch["input_ids"] attention_mask = train_batch[ "attention_mask" ] targets = train_batch["targets"] output = self.forward( input_ids, attention_mask ) _, y_hat = torch.max(output, dim=1) loss = F.cross_entropy(output, targets) self.train_acc(y_hat, targets) self.log( "train_acc", self.train_acc.compute().cpu(), ) self.log("train_loss", loss.cpu()) return {"loss": loss} def test_step(self, test_batch, batch_idx): """ Performs test and computes the accuracy of the model :param test_batch: Batch data :param batch_idx: Batch indices :return: output - Testing accuracy """ input_ids = test_batch["input_ids"] attention_mask = test_batch[ "attention_mask" ] targets = test_batch["targets"] output = self.forward( input_ids, attention_mask ) _, y_hat = torch.max(output, dim=1) self.test_acc(y_hat, targets) self.log( "test_acc", self.test_acc.compute().cpu(), ) def validation_step( self, val_batch, batch_idx ): """ Performs validation of data in batches :param val_batch: Batch data :param batch_idx: Batch indices :return: output - valid step loss """ input_ids = val_batch["input_ids"] attention_mask = val_batch[ "attention_mask" ] targets = val_batch["targets"] output = self.forward( input_ids, attention_mask ) _, y_hat = torch.max(output, dim=1) loss = F.cross_entropy(output, targets) self.val_acc(y_hat, targets) self.log( "val_acc", self.val_acc.compute().cpu(), ) self.log("val_loss", loss, sync_dist=True) def configure_optimizers(self): """ Initializes the optimizer and learning rate scheduler :return: output - Initialized optimizer and scheduler """ optimizer = AdamW( self.parameters(), lr=self.args["lr"] ) scheduler = { "scheduler": torch.optim.lr_scheduler.ReduceLROnPlateau( optimizer, mode="min", factor=0.2, patience=2, min_lr=1e-6, verbose=True, ), "monitor": "val_loss", } return [optimizer], [scheduler] 
if __name__ == "__main__":
    parser = ArgumentParser(
        description="Bert-News Classifier Example"
    )
    parser.add_argument(
        "--num_samples",
        type=int,
        default=15000,
        metavar="N",
        help="Samples for training and evaluation steps (default: 15000) Maximum:100000",
    )
    parser.add_argument(
        "--vocab_file",
        default="https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt",
        help="Custom vocab file",
    )
    parser.add_argument(
        "--checkpoint_dir",
        default="/workspace/checkpoint/",
        help="Path of checkpoint directory",
    )
    parser.add_argument(
        "--model_save_path",
        default="/workspace/model",
        help="Path to model file",
    )

    # Let the trainer, the model and the data module each register their
    # own command-line flags on top of the script-level ones above.
    parser = pl.Trainer.add_argparse_args(parent_parser=parser)
    parser = BertNewsClassifier.add_model_specific_args(parent_parser=parser)
    parser = BertDataModule.add_model_specific_args(parent_parser=parser)

    args = parser.parse_args()
    dict_args = vars(args)

    # argparse delivers the accelerator as the *string* "None"; normalize it
    # to a real None so the Trainer can auto-select a backend.
    if "accelerator" in dict_args and dict_args["accelerator"] == "None":
        dict_args["accelerator"] = None

    dm = BertDataModule(**dict_args)
    dm.prepare_data()
    dm.setup(stage="fit")

    model = BertNewsClassifier(**dict_args)

    early_stopping = EarlyStopping(
        monitor="val_loss",
        mode="min",
        verbose=True,
    )
    checkpoint_callback = ModelCheckpoint(
        dirpath=dict_args["checkpoint_dir"],
        save_top_k=1,
        verbose=True,
        monitor="val_loss",
        mode="min",
    )
    lr_logger = LearningRateMonitor()

    # NOTE: to run under Kubeflow instead, use
    # DDPPlugin(cluster_environment=KubeflowEnvironment) as the plugin.
    trainer = pl.Trainer.from_argparse_args(
        args,
        callbacks=[
            lr_logger,
            early_stopping,
            checkpoint_callback,
        ],
        checkpoint_callback=True,
        plugins=DDPPlugin(find_unused_parameters=False),
    )
    trainer.fit(model, dm)
    trainer.test(model)

    # Only the rank-0 process persists the trained weights.
    if trainer.global_rank == 0:
        # Fix: exist_ok=True so re-running the script does not crash when the
        # save directory already exists (bare os.makedirs raises then).
        os.makedirs(dict_args["model_save_path"], exist_ok=True)
        torch.save(
            model.state_dict(),
            os.path.join(
                dict_args["model_save_path"],
                "bert.pt",
            ),
        )
7,838
4,879
#pragma once

#include "drape_frontend/animation/animation.hpp"
#include "drape_frontend/screen_operations.hpp"

#include <string>

namespace df
{
// Well-known animation names; used to identify/cancel named animations
// (definitions live in the corresponding .cpp).
extern std::string const kPrettyMoveAnim;
extern std::string const kPrettyFollowAnim;
extern std::string const kParallelFollowAnim;
extern std::string const kParallelLinearAnim;

// Forward declarations of the concrete animation types returned below.
class SequenceAnimation;
class MapLinearAnimation;
class MapFollowAnimation;
class MapScaleAnimation;

// Factories building a "pretty move" sequence animation, overloaded on how
// the start/end state is specified: two full screens, a rect pair on one
// screen, or explicit scales plus start/end points.
drape_ptr<SequenceAnimation> GetPrettyMoveAnimation(ScreenBase const & startScreen, ScreenBase const & endScreen);
drape_ptr<SequenceAnimation> GetPrettyMoveAnimation(ScreenBase const & screen, m2::AnyRectD const & startRect, m2::AnyRectD const & endRect);
drape_ptr<SequenceAnimation> GetPrettyMoveAnimation(ScreenBase const & screen, double startScale, double endScale, m2::PointD const & startPt, m2::PointD const & endPt);

// Builds a "pretty follow" sequence toward userPos with the requested target
// scale/angle; endPixelPos is the desired on-screen position of the user mark.
drape_ptr<SequenceAnimation> GetPrettyFollowAnimation(ScreenBase const & startScreen, m2::PointD const & userPos, double targetScale, double targetAngle, m2::PointD const & endPixelPos);

// Linear interpolation between two screen states / between two rects on the
// given screen.
drape_ptr<MapLinearAnimation> GetRectAnimation(ScreenBase const & startScreen, ScreenBase const & endScreen);
drape_ptr<MapLinearAnimation> GetSetRectAnimation(ScreenBase const & screen, m2::AnyRectD const & startRect, m2::AnyRectD const & endRect);

// Follow animation toward userPos; isAutoZoom presumably enables automatic
// zoom selection during following — confirm in the .cpp.
drape_ptr<MapFollowAnimation> GetFollowAnimation(ScreenBase const & startScreen, m2::PointD const & userPos, double targetScale, double targetAngle, m2::PointD const & endPixelPos, bool isAutoZoom);

// Scale animation around the given pixel/global centers by the given factor.
drape_ptr<MapScaleAnimation> GetScaleAnimation(ScreenBase const & startScreen, m2::PointD pxScaleCenter, m2::PointD glbScaleCenter, double factor);
}  // namespace df
823
440
package com.yb.socket.push.mqtt; /** * @author <EMAIL> * @date 2019/2/26 14:58 */ public class ChannelDO { public String channelId; public String clientId; public String topic; public int createTime; public String getChannelId() { return channelId; } public void setChannelId(String channelId) { this.channelId = channelId; } public String getClientId() { return clientId; } public void setClientId(String clientId) { this.clientId = clientId; } public String getTopic() { return topic; } public void setTopic(String topic) { this.topic = topic; } public int getCreateTime() { return createTime; } public void setCreateTime(int createTime) { this.createTime = createTime; } }
333
794
import csv
import pprint

from .layers import Base


def get_human_readable(num):
    """Scale a raw count into a short human-readable string with a K/M/G/T/P
    suffix (e.g. 1500 -> '1.500K').

    Fix: the original loop condition was ``.001 * num > 1``, which left exact
    powers of 1000 unscaled (1000 -> '1000.000' instead of '1.000K').  Values
    beyond the largest unit fall back to scientific notation.
    """
    units = ['', 'K', 'M', 'G', 'T', 'P']
    idx = 0
    while abs(num) >= 1000 and idx < len(units) - 1:
        num = .001 * num
        idx += 1
    if abs(num) >= 1000:
        # Still too large for the biggest supported suffix.
        return '%.3e' % num
    return '%.3f' % num + units[idx]


def save_csv(layers, csv_save_path='/tmp/analyse.csv',
             save_items=('name', 'layer_info', 'input', 'out', 'dot', 'add',
                         'compare', 'ops', 'weight_size', 'activation_size'),
             print_detail=True, human_readable=True):
    """Print per-layer statistics and optionally save them as CSV.

    :param layers: iterable of layer objects exposing the ``save_items`` attributes
    :param csv_save_path: target CSV path, or None to skip writing a file
    :param save_items: attribute names extracted from every layer (one column each)
    :param print_detail: True -> full per-layer table plus a SUM row;
                         False -> only the non-zero totals, one summary string each
    :param human_readable: use K/M/G suffixes in the totals-only output
    """
    print_list = []
    # Per-column numeric totals, accumulated over all layers.
    totals = [0] * len(save_items)
    for layer in layers:
        print_line = []
        for idx, attr in enumerate(save_items):
            item = getattr(layer, attr)
            if type(item) == list:
                # Join list-valued attributes into one space-prefixed string.
                s = ''
                for element in item:
                    s += ' ' + str(element)
            else:
                s = str(item)
            # Accumulate numeric columns; non-numeric attributes are skipped.
            try:
                totals[idx] += int(item)
            except (TypeError, ValueError):
                pass
            print_line.append(s)
        print_list.append(print_line)
    if print_detail:
        totals[0] = 'SUM'
        print_list.append(totals)
        print_table(print_list, save_items)
    else:
        print_list = []
        for idx, total in enumerate(totals):
            if isinstance(total, int) and total > 0:
                if human_readable:
                    print_list.append('%s:%s' % (save_items[idx], get_human_readable(total)))
                else:
                    print_list.append('%s:%.3e' % (save_items[idx], total))
        print(print_list)
    if csv_save_path is not None:
        # newline='' is the documented way to open files for csv.writer
        # (prevents doubled line endings on Windows).
        with open(csv_save_path, 'w', newline='') as file:
            writer = csv.writer(file)
            writer.writerow(save_items)
            for row in print_list:
                # Fix: writerow() treats a bare string as an iterable of
                # characters, so the summary-mode strings used to be split
                # into one character per CSV cell.  Wrap them instead.
                writer.writerow([row] if isinstance(row, str) else row)
        print('saved at {}!'.format(csv_save_path))


def get_layer_blox_from_blobs(blobs):
    """Walk the creator graph backwards from ``blobs`` and collect every
    reachable ``Base`` layer, deduplicated, in discovery order."""
    layers = []

    def creator_search(blob):
        for father in blob.father:
            if isinstance(father, Base) and father not in layers:
                layers.append(father)
                # Recurse through the layer's own input(s) to reach earlier
                # layers.  ('muti_input' is the attribute name as declared
                # in .layers — sic.)
                if father.muti_input:
                    for input_blob in father.input:
                        creator_search(input_blob)
                else:
                    creator_search(father.input)

    for blob in blobs:
        creator_search(blob)
    return layers


def print_table(datas, names):
    """Pretty-print rows of values in fixed-width, '|'-separated columns.

    Column kinds are inferred from the first row: values parseable as numbers
    ('I') are shown in scientific notation 10 chars wide; everything else
    ('S') is truncated to 20 chars.  Note: the header row (``names``) is
    printed *after* the data, mirroring the original layout.
    """
    types = []
    for value in datas[0]:
        try:
            int(float(value))
            types.append('I')
        except (TypeError, ValueError):
            types.append('S')
    for row in datas:
        s = ''
        for value, kind in zip(row, types):
            if kind == 'I':
                s += ('%.1E' % int(float(value))).center(10)
            else:
                value = str(value)
                if len(value) > 20:
                    value = value[:17] + '...'
                s += value.center(20)
            s += '|'
        print(s)
    s = ''
    for name, kind in zip(names, types):
        if kind == 'I':
            s += name.center(10)
        else:
            if len(name) > 20:
                name = name[:17] + '...'
            s += name.center(20)
        s += '|'
    print(s)


def print_by_blob(blobs, print_items=('name', 'layer_info', 'input', 'out',
                                      'dot', 'add', 'compare', 'ops',
                                      'weight_size', 'activation_size')):
    """Collect the layers reachable from ``blobs`` and pretty-print the
    requested attributes; returns the row list."""
    layers = get_layer_blox_from_blobs(blobs)
    print_list = []
    for layer in layers:
        print_list.append([str(getattr(layer, param)) for param in print_items])
    pprint.pprint(print_list, depth=3, width=200)
    return print_list
1,916
1,405
package QQPIM; import com.qq.taf.jce.JceDisplayer; import com.qq.taf.jce.JceInputStream; import com.qq.taf.jce.JceOutputStream; import com.qq.taf.jce.JceStruct; import com.qq.taf.jce.JceUtil; public final class UninstallInfo extends JceStruct implements Cloneable { static final /* synthetic */ boolean a = (!UninstallInfo.class.desiredAssertionStatus()); public int option; public String uid; public UninstallInfo() { this.option = 0; this.uid = ""; this.option = this.option; this.uid = this.uid; } public UninstallInfo(int i, String str) { this.option = 0; this.uid = ""; this.option = i; this.uid = str; } public final String className() { return "QQPIM.UninstallInfo"; } @Override // java.lang.Object public final Object clone() { try { return super.clone(); } catch (CloneNotSupportedException e) { if (a) { return null; } throw new AssertionError(); } } @Override // com.qq.taf.jce.JceStruct public final void display(StringBuilder sb, int i) { JceDisplayer jceDisplayer = new JceDisplayer(sb, i); jceDisplayer.display(this.option, "option"); jceDisplayer.display(this.uid, "uid"); } public final boolean equals(Object obj) { if (obj == null) { return false; } UninstallInfo uninstallInfo = (UninstallInfo) obj; return JceUtil.equals(this.option, uninstallInfo.option) && JceUtil.equals(this.uid, uninstallInfo.uid); } public final String fullClassName() { return "QQPIM.UninstallInfo"; } public final int getOption() { return this.option; } public final String getUid() { return this.uid; } public final int hashCode() { try { throw new Exception("Need define key first!"); } catch (Exception e) { e.printStackTrace(); return 0; } } @Override // com.qq.taf.jce.JceStruct public final void readFrom(JceInputStream jceInputStream) { this.option = jceInputStream.read(this.option, 0, true); this.uid = jceInputStream.readString(1, true); } public final void setOption(int i) { this.option = i; } public final void setUid(String str) { this.uid = str; } @Override // com.qq.taf.jce.JceStruct 
public final void writeTo(JceOutputStream jceOutputStream) { jceOutputStream.write(this.option, 0); jceOutputStream.write(this.uid, 1); } }
1,177
418
//
//  BFProductDetailViewController.h
//  OpenShop
//
//  Created by <NAME>
//  Copyright (c) 2015 Business Factory. All rights reserved.
//

#import "BFTableViewController.h"
#import "BFProductDetailHeaderView.h"
#import "BFWishlistItem.h"

NS_ASSUME_NONNULL_BEGIN

/**
 * Protocol which informs the conforming object about product variant
 * selection. All callbacks are optional.
 */
@protocol BFProductVariantSelectionDelegate <NSObject>

@optional

/**
 * Informs the delegate that a product variant color was selected.
 *
 * @param productVariantColor The product variant color which was selected.
 */
- (void)selectedProductVariantColor:(BFProductVariantColor *)productVariantColor;

@end

/**
 * `BFProductDetailViewController` displays the product information details.
 * It renders a header view (`BFProductDetailHeaderView`) above a table view
 * and reacts to product variant selection via
 * `BFProductVariantSelectionDelegate`.
 */
@interface BFProductDetailViewController : BFTableViewController <BFCustomAppearance, BFProductDetailHeaderViewDelegate, BFProductVariantSelectionDelegate>

/**
 * The product data model. Nullable — presumably nil until a product is
 * assigned by the presenting controller (confirm at call sites).
 */
@property (nonatomic, strong, nullable) BFProduct *product;

/**
 * The wishlist item data model. Nullable — set when the product is opened
 * from the wishlist (confirm at call sites).
 */
@property (nonatomic, strong, nullable) BFWishlistItem *wishlistItem;

/**
 * The product info.
 */
@property (nonatomic, strong) BFDataRequestProductInfo *productInfo;

/**
 * The product detail header view.
 */
@property (nonatomic, strong) BFProductDetailHeaderView *headerView;

@end

NS_ASSUME_NONNULL_END
407
1,627
package com.cdkpatterns; import software.amazon.awscdk.core.CfnOutput; import software.amazon.awscdk.core.Construct; import software.amazon.awscdk.core.Stack; import software.amazon.awscdk.core.StackProps; import software.amazon.awscdk.services.apigatewayv2.HttpApi; import software.amazon.awscdk.services.apigatewayv2.integrations.LambdaProxyIntegration; import software.amazon.awscdk.services.dynamodb.Attribute; import software.amazon.awscdk.services.dynamodb.AttributeType; import software.amazon.awscdk.services.dynamodb.Table; import software.amazon.awscdk.services.dynamodb.TableProps; import software.amazon.awscdk.services.lambda.Code; import software.amazon.awscdk.services.lambda.Function; import software.amazon.awscdk.services.lambda.Runtime; import java.util.Map; public class TheSimpleWebserviceStack extends Stack { public TheSimpleWebserviceStack(final Construct scope, final String id) { this(scope, id, null); } public TheSimpleWebserviceStack(final Construct scope, final String id, final StackProps props) { super(scope, id, props); Table dynamoDbTable = createDynamoDBTable(); Function lambda = createLambda(dynamoDbTable.getTableName()); HttpApi api = createHttpApi(lambda); dynamoDbTable.grantReadWriteData(lambda); CfnOutput.Builder.create(this, "ApiUrl") .description("HTTP API Url") .value(api.getUrl()) .build(); } private HttpApi createHttpApi(Function dynamoLambda) { return HttpApi.Builder.create(this, "Endpoint") .defaultIntegration( LambdaProxyIntegration.Builder.create() .handler(dynamoLambda) .build()) .build(); } private Function createLambda(String tableName) { return Function.Builder.create(this, "DynamoLambdaHandler") .code(Code.fromAsset("./lambda/target/lambda.zip")) .handler("com.cdkpatterns.LambdaHandler::handleRequest") .runtime(Runtime.JAVA_11) .memorySize(1538) .environment(Map.of( "HITS_TABLE_NAME", tableName, "REGION", this.getRegion())) .build(); } private Table createDynamoDBTable() { return new Table(this, "Hits", TableProps.builder() 
.partitionKey(Attribute.builder() .name("path") .type(AttributeType.STRING) .build()) .build()); } }
1,197
483
package org.dizitart.no2.migration;

import org.dizitart.no2.common.Fields;
import org.dizitart.no2.common.tuples.Pair;
import org.dizitart.no2.common.tuples.Quartet;
import org.dizitart.no2.common.tuples.Triplet;

import java.util.List;

/**
 * A fluent migration instruction set for an
 * {@link org.dizitart.no2.repository.ObjectRepository}. Every default method
 * records exactly one {@link MigrationStep} via {@link #addStep(MigrationStep)}
 * and returns an instruction object so that calls can be chained.
 *
 * @author <NAME>
 * @since 4.0
 */
public interface RepositoryInstruction extends Instruction {

    /**
     * Records a step renaming the repository. The returned instruction is
     * bound to the <em>new</em> entity name/key, so subsequent chained steps
     * target the renamed repository.
     *
     * @param entityName the new entity name
     * @param key        the new key
     * @return an instruction bound to the renamed repository
     */
    default RepositoryInstruction renameRepository(String entityName, String key) {
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RenameRepository);
        step.setArguments(new Quartet<>(entityName(), key(), entityName, key));
        addStep(step);

        final RepositoryInstruction enclosing = this;
        return new RepositoryInstruction() {
            @Override
            public String entityName() {
                return entityName;
            }

            @Override
            public String key() {
                return key;
            }

            @Override
            public void addStep(MigrationStep step) {
                // Delegate recording to the original instruction chain.
                enclosing.addStep(step);
            }
        };
    }

    /**
     * Records a step adding a new field (without a default value) to the
     * entity in the repository.
     *
     * @param <T>       the field type
     * @param fieldName the field name
     * @return this instruction, for chaining
     */
    default <T> RepositoryInstruction addField(String fieldName) {
        return addField(fieldName, null);
    }

    /**
     * Records a step adding a new field with a default value to the entity
     * in the repository.
     *
     * @param <T>          the field type
     * @param fieldName    the field name
     * @param defaultValue the default value written for existing documents
     * @return this instruction, for chaining
     */
    default <T> RepositoryInstruction addField(String fieldName, T defaultValue) {
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryAddField);
        step.setArguments(new Quartet<>(entityName(), key(), fieldName, defaultValue));
        addStep(step);
        return this;
    }

    /**
     * Records a step adding a new field whose value is produced by a
     * {@link Generator} for each existing document.
     *
     * @param <T>       the field type
     * @param fieldName the field name
     * @param generator the value generator
     * @return this instruction, for chaining
     */
    default <T> RepositoryInstruction addField(String fieldName, Generator<T> generator) {
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryAddField);
        step.setArguments(new Quartet<>(entityName(), key(), fieldName, generator));
        addStep(step);
        return this;
    }

    /**
     * Records a step renaming a field of the entity in the repository.
     *
     * @param oldName the current field name
     * @param newName the new field name
     * @return this instruction, for chaining
     */
    default RepositoryInstruction renameField(String oldName, String newName) {
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryRenameField);
        step.setArguments(new Quartet<>(entityName(), key(), oldName, newName));
        addStep(step);
        return this;
    }

    /**
     * Records a step deleting a field from the entity in the repository.
     *
     * @param fieldName the field name
     * @return this instruction, for chaining
     */
    default RepositoryInstruction deleteField(String fieldName) {
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryDeleteField);
        step.setArguments(new Triplet<>(entityName(), key(), fieldName));
        addStep(step);
        return this;
    }

    /**
     * Records a step converting the datatype of a field via the supplied
     * {@link TypeConverter}.
     *
     * @param fieldName the field name
     * @param converter the converter applied to every existing value
     * @return this instruction, for chaining
     */
    default RepositoryInstruction changeDataType(String fieldName, TypeConverter converter) {
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryChangeDataType);
        step.setArguments(new Quartet<>(entityName(), key(), fieldName, converter));
        addStep(step);
        return this;
    }

    /**
     * Records a step changing the id field of the entity, with the field
     * sets given as name lists.
     *
     * @param oldFieldNames the current id field names
     * @param newFieldNames the new id field names
     * @return this instruction, for chaining
     */
    default RepositoryInstruction changeIdField(List<String> oldFieldNames, List<String> newFieldNames) {
        Fields from = Fields.withNames(oldFieldNames.toArray(new String[0]));
        Fields to = Fields.withNames(newFieldNames.toArray(new String[0]));
        return changeIdField(from, to);
    }

    /**
     * Records a step changing the id field of the entity.
     *
     * @param oldField the current id fields
     * @param newField the new id fields
     * @return this instruction, for chaining
     */
    default RepositoryInstruction changeIdField(Fields oldField, Fields newField) {
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryChangeIdField);
        step.setArguments(new Quartet<>(entityName(), key(), oldField, newField));
        addStep(step);
        return this;
    }

    /**
     * Records a step dropping the index on the given fields.
     *
     * @param fieldNames the indexed field names
     * @return this instruction, for chaining
     */
    default RepositoryInstruction dropIndex(String... fieldNames) {
        Fields fields = Fields.withNames(fieldNames);
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryDropIndex);
        step.setArguments(new Triplet<>(entityName(), key(), fields));
        addStep(step);
        return this;
    }

    /**
     * Records a step dropping all indices of the repository.
     *
     * @return this instruction, for chaining
     */
    default RepositoryInstruction dropAllIndices() {
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryDropIndices);
        step.setArguments(new Pair<>(entityName(), key()));
        addStep(step);
        return this;
    }

    /**
     * Records a step creating an index of the given type on the given fields.
     *
     * @param indexType  the index type
     * @param fieldNames the field names to index
     * @return this instruction, for chaining
     */
    default RepositoryInstruction createIndex(String indexType, String... fieldNames) {
        Fields fields = Fields.withNames(fieldNames);
        MigrationStep step = new MigrationStep();
        step.setInstructionType(InstructionType.RepositoryCreateIndex);
        step.setArguments(new Quartet<>(entityName(), key(), fields, indexType));
        addStep(step);
        return this;
    }

    /**
     * The entity name of the {@link org.dizitart.no2.repository.ObjectRepository}.
     *
     * @return the entity name
     */
    String entityName();

    /**
     * The key of the {@link org.dizitart.no2.repository.ObjectRepository}.
     *
     * @return the key
     */
    String key();
}
3,141
15,193
#----------------------------------------------------------------------------- # Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors. # All rights reserved. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- ''' ''' #----------------------------------------------------------------------------- # Boilerplate #----------------------------------------------------------------------------- from __future__ import annotations import logging # isort:skip log = logging.getLogger(__name__) #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Standard library imports import colorsys from math import sqrt from typing import TYPE_CHECKING # Bokeh imports from .color import Color if TYPE_CHECKING: from .hsl import HSL #----------------------------------------------------------------------------- # Globals and constants #----------------------------------------------------------------------------- __all__ = ( 'RGB', ) #----------------------------------------------------------------------------- # General API #----------------------------------------------------------------------------- class RGB(Color): ''' Represent colors by specifying their Red, Green, and Blue channels. Alpha values may also optionally be provided. Otherwise, alpha values default to 1. ''' def __init__(self, r: int, g: int, b: int, a: float = 1.0) -> None: ''' Args: r (int) : The value for the red channel in [0, 255] g (int) : The value for the green channel in [0, 255] b (int) : The value for the blue channel in [0, 255] a (float, optional) : An alpha value for this color in [0, 1] (default: 1.0) ''' self.r = r self.g = g self.b = b self.a = a def copy(self) -> RGB: ''' Return a copy of this color value. 
Returns: :class:`~bokeh.colors.rgb.RGB` ''' return RGB(self.r, self.g, self.b, self.a) @classmethod def from_hsl(cls, value: HSL) -> RGB: ''' Create an RGB color from an HSL color value. Args: value (HSL) : The HSL color to convert. Returns: :class:`~bokeh.colors.rgb.RGB` ''' return value.to_rgb() @classmethod def from_rgb(cls, value: RGB) -> RGB: ''' Copy an RGB color from another RGB color value. Args: value (:class:`~bokeh.colors.rgb.RGB`) : The RGB color to copy. Returns: :class:`~bokeh.colors.rgb.RGB` ''' return value.copy() def to_css(self) -> str: ''' Generate the CSS representation of this RGB color. Returns: str, ``"rgb(...)"`` or ``"rgba(...)"`` ''' if self.a == 1.0: return f"rgb({self.r}, {self.g}, {self.b})" else: return f"rgba({self.r}, {self.g}, {self.b}, {self.a})" def to_hex(self) -> str: ''' Return a hex color string for this RGB color. Any alpha value on this color is discarded, only hex color strings for the RGB components are returned. Returns: str, ``"#RRGGBB"`` ''' return "#%02X%02X%02X" % (self.r, self.g, self.b) def to_hsl(self) -> HSL: ''' Return a corresponding HSL color for this RGB color. Returns: :class:`~bokeh.colors.hsl.HSL` ''' from .hsl import HSL # prevent circular import h, l, s = colorsys.rgb_to_hls(float(self.r)/255, float(self.g)/255, float(self.b)/255) return HSL(round(h*360), s, l, self.a) def to_rgb(self) -> RGB: ''' Return a RGB copy for this RGB color. Returns: :class:`~bokeh.colors.rgb.RGB` ''' return self.copy() @property def brightness(self) -> float: """ Perceived brightness of a color in [0, 1] range. 
""" # http://alienryderflex.com/hsp.html r, g, b = self.r, self.g, self.b return sqrt(0.299*r**2 + 0.587*g**2 + 0.114*b**2)/255 #----------------------------------------------------------------------------- # Dev API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Private API #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Code #-----------------------------------------------------------------------------
1,802
504
from fastapi import FastAPI
from jaeger_client import Tracer

from fastapi_contrib.tracing.utils import setup_opentracing


def test_setup_opentracing():
    # setup_opentracing() must attach a jaeger Tracer to the app instance.
    application = FastAPI()
    setup_opentracing(application)
    assert isinstance(application.tracer, Tracer)
108
1,422
/*
 * Copyright (C) 2012 The Flogger Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.flogger.parser;

/**
 * Base class from which any specific message parsers are derived (e.g. {@link PrintfMessageParser}
 * and {@link BraceStyleMessageParser}). A parser walks a log message template, reporting each
 * parameter placeholder it finds to a {@link MessageBuilder}, and knows how to unescape the
 * literal (non-placeholder) portions of the message.
 */
public abstract class MessageParser {
  /**
   * The maximum allowed index (this should correspond to the MAX_ALLOWED_WIDTH in
   * {@link com.google.common.flogger.backend.FormatOptions FormatOptions} because at times it is
   * ambiguous as to which is being parsed).
   */
  public static final int MAX_ARG_COUNT = 1000000;

  /**
   * Abstract parse method implemented by specific subclasses to modify parsing behavior.
   * <p>
   * Note that when extending parsing behavior, it is expected that specific parsers such as
   * {@link DefaultPrintfMessageParser} or {@link DefaultBraceStyleMessageParser} will be
   * sub-classed. Extending this class directly is only necessary when an entirely new type of
   * format needs to be supported (which should be extremely rare).
   * <p>
   * Implementations of this method are required to invoke the
   * {@link MessageBuilder#addParameterImpl} method of the supplied builder once for each
   * parameter place-holder in the message.
   */
  protected abstract <T> void parseImpl(MessageBuilder<T> builder) throws ParseException;

  /**
   * Appends the unescaped literal representation of the given message string (assumed to be escaped
   * according to this parser's escaping rules). This method is designed to be invoked from a
   * callback method in a {@link MessageBuilder} instance.
   *
   * @param out the destination into which to append characters
   * @param message the escaped log message
   * @param start the start index (inclusive) in the log message
   * @param end the end index (exclusive) in the log message
   */
  public abstract void unescape(StringBuilder out, String message, int start, int end);
}
667
2,209
from .embedding import Embedding from .embedding import load_from_file
20
1,253
# -*- coding: utf-8 -*-
"""Output related functions and classes for testing."""

from plaso.engine import knowledge_base
from plaso.output import mediator

from tests import test_lib as shared_test_lib


class TestConfig(object):
  """Test configuration."""


class OutputModuleTestCase(shared_test_lib.BaseTestCase):
  """The unit test case for an output module."""

  def _CreateOutputMediator(self, dynamic_time=True, storage_file=None):
    """Creates a test output mediator.

    Args:
      dynamic_time (Optional[bool]): True if date and time values should be
          represented in their granularity or semantically.
      storage_file (Optional[StorageFile]): storage file whose session system
          configurations are loaded into the knowledge base.

    Returns:
      OutputMediator: output mediator.
    """
    test_knowledge_base = knowledge_base.KnowledgeBase()

    if storage_file:
      # Replay the system configuration of every stored session into the
      # knowledge base so formatting helpers can resolve it.
      for session in storage_file.GetSessions():
        for configuration in session.source_configurations or []:
          test_knowledge_base.ReadSystemConfigurationArtifact(
              configuration.system_configuration,
              session_identifier=session.identifier)

    return mediator.OutputMediator(
        test_knowledge_base, data_location=shared_test_lib.TEST_DATA_PATH,
        dynamic_time=dynamic_time)
11,433
import pytest import sqlite3 from unittest.mock import call, Mock from allennlp.common.testing import AllenNlpTestCase from scripts.ai2_internal.resume_daemon import ( BeakerStatus, create_table, handler, logger, resume, start_autoresume, ) # Don't spam the log in tests. logger.removeHandler(handler) class ResumeDaemonTest(AllenNlpTestCase): def setup_method(self): super().setup_method() self.connection = sqlite3.connect(":memory:") create_table(self.connection) def test_create_beaker_status_works(self): status = BeakerStatus("stopped") assert status.name == "stopped" def test_create_beaker_status_throws(self): with pytest.raises(ValueError): status = BeakerStatus("garbage") assert status.name == "garbage" def test_does_nothing_on_empty_db(self): beaker = Mock() resume(self.connection, beaker) assert not beaker.method_calls def test_does_not_resume_a_running_experiment(self): beaker = Mock() experiment_id = "foo" start_autoresume(self.connection, experiment_id, 5) beaker.get_status.return_value = BeakerStatus.running resume(self.connection, beaker) beaker.get_status.assert_called() assert len(beaker.method_calls) == 1 def test_does_not_resume_a_finished_experiment(self): beaker = Mock() experiment_id = "foo" start_autoresume(self.connection, experiment_id, 5) beaker.get_status.return_value = BeakerStatus.succeeded resume(self.connection, beaker) beaker.get_status.assert_called() assert len(beaker.method_calls) == 1 def test_does_resume_a_preempted_experiment(self): beaker = Mock() experiment_id = "foo" start_autoresume(self.connection, experiment_id, 5) beaker.get_status.return_value = BeakerStatus.preempted beaker.resume.return_value = "foo2" resume(self.connection, beaker) beaker.get_status.assert_called() beaker.resume.assert_called() assert len(beaker.method_calls) == 2 def test_respects_upper_bound_on_resumes(self): beaker = Mock() experiment_id = "foo" start_autoresume(self.connection, experiment_id, 5) beaker.get_status.return_value = BeakerStatus.preempted 
for i in range(10): beaker.resume.return_value = f"foo{i}" resume(self.connection, beaker) calls = [ call.get_status("foo"), call.resume("foo"), call.get_status("foo0"), call.resume("foo0"), call.get_status("foo1"), call.resume("foo1"), call.get_status("foo2"), call.resume("foo2"), call.get_status("foo3"), call.resume("foo3"), call.get_status("foo4"), ] beaker.assert_has_calls(calls) def test_handles_a_realistic_scenario(self): beaker = Mock() experiment_id = "foo" start_autoresume(self.connection, experiment_id, 5) beaker.get_status.return_value = BeakerStatus.preempted for i in range(10): beaker.resume.return_value = f"foo{i}" if i == 2: beaker.get_status.return_value = BeakerStatus.succeeded resume(self.connection, beaker) calls = [ call.get_status("foo"), call.resume("foo"), call.get_status("foo0"), call.resume("foo0"), call.get_status("foo1"), ] beaker.assert_has_calls(calls)
1,692
2,381
<gh_stars>1000+ package com.github.dockerjava.cmd; import com.github.dockerjava.api.command.CreateContainerResponse; import com.github.dockerjava.api.exception.InternalServerErrorException; import com.github.dockerjava.api.exception.NotFoundException; import com.github.dockerjava.utils.ContainerUtils; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.emptyString; import static org.hamcrest.Matchers.not; public class UnpauseCmdIT extends CmdIT { public static final Logger LOG = LoggerFactory.getLogger(UnpauseCmdIT.class); @Test public void unpausePausedContainer() { CreateContainerResponse container = dockerRule.getClient().createContainerCmd("busybox").withCmd("sleep", "9999").exec(); LOG.info("Created container: {}", container.toString()); assertThat(container.getId(), not(is(emptyString()))); ContainerUtils.startContainer(dockerRule.getClient(), container); ContainerUtils.pauseContainer(dockerRule.getClient(), container); ContainerUtils.unpauseContainer(dockerRule.getClient(), container); } @Test(expected = InternalServerErrorException.class) public void unpauseRunningContainer() { CreateContainerResponse container = dockerRule.getClient().createContainerCmd("busybox").withCmd("sleep", "9999").exec(); LOG.info("Created container: {}", container.toString()); assertThat(container.getId(), not(is(emptyString()))); ContainerUtils.startContainer(dockerRule.getClient(), container); dockerRule.getClient().unpauseContainerCmd(container.getId()).exec(); } @Test(expected = InternalServerErrorException.class) public void unpauseStoppedContainer() { CreateContainerResponse container = dockerRule.getClient().createContainerCmd("busybox").withCmd("sleep", "9999").exec(); LOG.info("Created container: {}", container.toString()); assertThat(container.getId(), not(is(emptyString()))); 
ContainerUtils.startContainer(dockerRule.getClient(), container); ContainerUtils.stopContainer(dockerRule.getClient(), container); dockerRule.getClient().unpauseContainerCmd(container.getId()).exec(); } @Test(expected = NotFoundException.class) public void unpauseNonExistingContainer() { dockerRule.getClient().unpauseContainerCmd("non-existing").exec(); } @Test(expected = InternalServerErrorException.class) public void unpauseCreatedContainer() { CreateContainerResponse container = dockerRule.getClient().createContainerCmd("busybox").withCmd("sleep", "9999").exec(); LOG.info("Created container: {}", container.toString()); assertThat(container.getId(), not(is(emptyString()))); dockerRule.getClient().unpauseContainerCmd(container.getId()).exec(); } @Test(expected = InternalServerErrorException.class) public void unpauseUnpausedContainer() { CreateContainerResponse container = dockerRule.getClient().createContainerCmd("busybox").withCmd("sleep", "9999").exec(); LOG.info("Created container: {}", container.toString()); assertThat(container.getId(), not(is(emptyString()))); ContainerUtils.startContainer(dockerRule.getClient(), container); ContainerUtils.pauseContainer(dockerRule.getClient(), container); dockerRule.getClient().unpauseContainerCmd(container.getId()).exec(); dockerRule.getClient().unpauseContainerCmd(container.getId()).exec(); } }
1,184
524
{ "title": "networkconnectivity", "defaultService": "networkconnectivity\/readme", "services": [ { "title": "Overview", "type": "networkconnectivity\/readme" }, { "title": "HubServiceClient (v1)", "type": "networkconnectivity\/v1\/hubserviceclient" } ], "pattern": "networkconnectivity\/\\w{1,}" }
190
37,508
/************************************************************************************************** * * * This file is part of BLASFEO. * * * * BLASFEO -- BLAS For Embedded Optimization. * * Copyright (C) 2019 by <NAME>. * * Developed at IMTEK (University of Freiburg) under the supervision of Moritz Diehl. * * All rights reserved. * * * * The 2-Clause BSD License * * * * Redistribution and use in source and binary forms, with or without * * modification, are permitted provided that the following conditions are met: * * * * 1. Redistributions of source code must retain the above copyright notice, this * * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * * this list of conditions and the following disclaimer in the documentation * * and/or other materials provided with the distribution. * * * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * * * Author: <NAME>, gianluca.frison (at) imtek.uni-freiburg.de * * * **************************************************************************************************/ #ifndef BLASFEO_COMMON_H_ #define BLASFEO_COMMON_H_ #include "blasfeo_target.h" #ifdef __cplusplus extern "C" { #endif #if defined(__GNUC__) || defined(__clang__) || defined(__INTEL_COMPILER) || defined(__ICL) || defined(__ICC) || defined(__INTEL_LLVM_COMPILER) #define ALIGNED(VEC, BYTES) VEC __attribute__ ((aligned ( BYTES ))) #elif defined (_MSC_VER) #define ALIGNED(VEC, BYTES) __declspec(align( BYTES )) VEC #else #define ALIGNED(VEC, BYTES) VEC #endif #if ( defined(LA_HIGH_PERFORMANCE) & defined(MF_PANELMAJ) ) | ( defined(LA_REFERENCE) & defined(MF_PANELMAJ) ) #include "blasfeo_block_size.h" // matrix structure struct blasfeo_dmat { double *mem; // pointer to passed chunk of memory double *pA; // pointer to a pm*pn array of doubles, the first is aligned to cache line size double *dA; // pointer to a min(m,n) (or max???) array of doubles int m; // rows int n; // cols int pm; // packed number or rows int cn; // packed number or cols int use_dA; // flag to tell if dA can be used int memsize; // size of needed memory }; struct blasfeo_smat { float *mem; // pointer to passed chunk of memory float *pA; // pointer to a pm*pn array of floats, the first is aligned to cache line size float *dA; // pointer to a min(m,n) (or max???) 
array of floats int m; // rows int n; // cols int pm; // packed number or rows int cn; // packed number or cols int use_dA; // flag to tell if dA can be used int memsize; // size of needed memory }; // vector structure struct blasfeo_dvec { double *mem; // pointer to passed chunk of memory double *pa; // pointer to a pm array of doubles, the first is aligned to cache line size int m; // size int pm; // packed size int memsize; // size of needed memory }; struct blasfeo_svec { float *mem; // pointer to passed chunk of memory float *pa; // pointer to a pm array of floats, the first is aligned to cache line size int m; // size int pm; // packed size int memsize; // size of needed memory }; #define BLASFEO_DMATEL(sA,ai,aj) ((sA)->pA[((ai)-((ai)&(D_PS-1)))*(sA)->cn+(aj)*D_PS+((ai)&(D_PS-1))]) #define BLASFEO_SMATEL(sA,ai,aj) ((sA)->pA[((ai)-((ai)&(S_PS-1)))*(sA)->cn+(aj)*S_PS+((ai)&(S_PS-1))]) #define BLASFEO_DVECEL(sa,ai) ((sa)->pa[ai]) #define BLASFEO_SVECEL(sa,ai) ((sa)->pa[ai]) #elif ( defined(LA_HIGH_PERFORMANCE) & defined(MF_COLMAJ) ) | ( defined(LA_REFERENCE) & defined(MF_COLMAJ) ) | defined(LA_EXTERNAL_BLAS_WRAPPER) // matrix structure struct blasfeo_dmat { double *mem; // pointer to passed chunk of memory double *pA; // pointer to a m*n array of doubles double *dA; // pointer to a min(m,n) (or max???) array of doubles int m; // rows int n; // cols int use_dA; // flag to tell if dA can be used int memsize; // size of needed memory }; struct blasfeo_smat { float *mem; // pointer to passed chunk of memory float *pA; // pointer to a m*n array of floats float *dA; // pointer to a min(m,n) (or max???) 
array of floats int m; // rows int n; // cols int use_dA; // flag to tell if dA can be used int memsize; // size of needed memory }; // vector structure struct blasfeo_dvec { double *mem; // pointer to passed chunk of memory double *pa; // pointer to a m array of doubles, the first is aligned to cache line size int m; // size int memsize; // size of needed memory }; struct blasfeo_svec { float *mem; // pointer to passed chunk of memory float *pa; // pointer to a m array of floats, the first is aligned to cache line size int m; // size int memsize; // size of needed memory }; #define BLASFEO_DMATEL(sA,ai,aj) ((sA)->pA[(ai)+(aj)*(sA)->m]) #define BLASFEO_SMATEL(sA,ai,aj) ((sA)->pA[(ai)+(aj)*(sA)->m]) #define BLASFEO_DVECEL(sa,ai) ((sa)->pa[ai]) #define BLASFEO_SVECEL(sa,ai) ((sa)->pa[ai]) #else #error : wrong LA or MF choice #endif // Explicitly panel-major matrix structure struct blasfeo_pm_dmat { double *mem; // pointer to passed chunk of memory double *pA; // pointer to a pm*pn array of doubles, the first is aligned to cache line size double *dA; // pointer to a min(m,n) (or max???) array of doubles int m; // rows int n; // cols int pm; // packed number or rows int cn; // packed number or cols int use_dA; // flag to tell if dA can be used int ps; // panel size int memsize; // size of needed memory }; struct blasfeo_pm_smat { float *mem; // pointer to passed chunk of memory float *pA; // pointer to a pm*pn array of floats, the first is aligned to cache line size float *dA; // pointer to a min(m,n) (or max???) array of floats int m; // rows int n; // cols int pm; // packed number or rows int cn; // packed number or cols int use_dA; // flag to tell if dA can be used int ps; // panel size int memsize; // size of needed memory }; // Explicitly column-major matrix structure struct blasfeo_cm_dmat { double *mem; // pointer to passed chunk of memory double *pA; // pointer to a m*n array of doubles double *dA; // pointer to a min(m,n) (or max???) 
array of doubles int m; // rows int n; // cols int use_dA; // flag to tell if dA can be used int memsize; // size of needed memory }; struct blasfeo_cm_smat { float *mem; // pointer to passed chunk of memory float *pA; // pointer to a m*n array of floats float *dA; // pointer to a min(m,n) (or max???) array of floats int m; // rows int n; // cols int use_dA; // flag to tell if dA can be used int memsize; // size of needed memory }; #define BLASFEO_PM_DMATEL(sA,ai,aj) ((sA)->pA[((ai)-((ai)&((sA)->ps-1)))*(sA)->cn+(aj)*((sA)->ps)+((ai)&((sA)->ps-1))]) #define BLASFEO_PM_SMATEL(sA,ai,aj) ((sA)->pA[((ai)-((ai)&((sA)->ps-1)))*(sA)->cn+(aj)*((sA)->ps)+((ai)&((sA)->ps-1))]) #define BLASFEO_CM_DMATEL(sA,ai,aj) ((sA)->pA[(ai)+(aj)*(sA)->m]) #define BLASFEO_CM_SMATEL(sA,ai,aj) ((sA)->pA[(ai)+(aj)*(sA)->m]) #ifdef __cplusplus } #endif #endif // BLASFEO_COMMON_H_
4,390
348
<reponame>chamberone/Leaflet.PixiOverlay<gh_stars>100-1000 {"nom":"Labarthe","circ":"1ère circonscription","dpt":"Gers","inscrits":115,"abs":55,"votants":60,"blancs":4,"nuls":2,"exp":54,"res":[{"nuance":"REM","nom":"<NAME>","voix":32},{"nuance":"SOC","nom":"<NAME>","voix":22}]}
115
1,336
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.metrics.core; import java.util.IdentityHashMap; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.Executor; import java.util.concurrent.ThreadPoolExecutor; import org.apache.servicecomb.core.SCBEngine; import org.apache.servicecomb.core.definition.MicroserviceMeta; import org.apache.servicecomb.core.definition.OperationMeta; import org.apache.servicecomb.core.executor.GroupExecutor; import org.apache.servicecomb.core.executor.ThreadPoolExecutorEx; import org.apache.servicecomb.foundation.common.utils.BeanUtils; import org.apache.servicecomb.foundation.metrics.MetricsBootstrapConfig; import org.apache.servicecomb.foundation.metrics.MetricsInitializer; import org.apache.servicecomb.foundation.metrics.registry.GlobalRegistry; import com.google.common.eventbus.EventBus; import com.netflix.spectator.api.BasicTag; import com.netflix.spectator.api.Registry; import com.netflix.spectator.api.Tag; import com.netflix.spectator.api.patterns.PolledMeter; import com.netflix.spectator.api.patterns.ThreadPoolMonitor; public class ThreadPoolMetersInitializer implements MetricsInitializer { public static String REJECTED_COUNT = 
"threadpool.rejectedCount"; private Registry registry; @Override public void init(GlobalRegistry globalRegistry, EventBus eventBus, MetricsBootstrapConfig config) { registry = globalRegistry.getDefaultRegistry(); createThreadPoolMeters(); } public void createThreadPoolMeters() { Map<Executor, Executor> operationExecutors = collectionOperationExecutors(); // currently, all operation executors come from bean Map<String, Executor> beanExecutors = BeanUtils.getContext().getBeansOfType(Executor.class); for (Entry<String, Executor> entry : beanExecutors.entrySet()) { Executor executor = entry.getValue(); if (!operationExecutors.containsKey(executor)) { continue; } if (GroupExecutor.class.isInstance(executor)) { createThreadPoolMeters(entry.getKey(), (GroupExecutor) executor); continue; } createThreadPoolMeters(entry.getKey(), executor); } } protected Map<Executor, Executor> collectionOperationExecutors() { Map<Executor, Executor> operationExecutors = new IdentityHashMap<>(); //only one instance in the values MicroserviceMeta microserviceMeta = SCBEngine.getInstance().getProducerMicroserviceMeta(); for (OperationMeta operationMeta : microserviceMeta.getOperations()) { operationExecutors.put(operationMeta.getExecutor(), operationMeta.getExecutor()); } return operationExecutors; } protected void createThreadPoolMeters(String threadPoolName, GroupExecutor groupExecutor) { for (int idx = 0; idx < groupExecutor.getExecutorList().size(); idx++) { Executor executor = groupExecutor.getExecutorList().get(idx); createThreadPoolMeters(threadPoolName + "-group" + idx, executor); } } protected void createThreadPoolMeters(String threadPoolName, Executor executor) { if (!ThreadPoolExecutor.class.isInstance(executor)) { return; } ThreadPoolMonitor.attach(registry, (ThreadPoolExecutor) executor, threadPoolName); if (executor instanceof ThreadPoolExecutorEx) { Tag idTag = new BasicTag("id", threadPoolName); PolledMeter.using(registry) .withName(REJECTED_COUNT) .withTag(idTag) 
.monitorMonotonicCounter((ThreadPoolExecutorEx) executor, ThreadPoolExecutorEx::getRejectedCount); } } }
1,356
2,023
<reponame>tdiprima/code # Selective cleanup (deletion) of files (based on category and extension) # For use by a SABnzbd+ external post-processing script # Version: 1.05 # Date: 2009/07/19 # License: As-is; public domain # Requirements: Python 3.1, SABnzbd+ 0.4.11 # Description: This script clean's up (deletes) files with specific extensions, e.g. .sfv, .nzb, but only for downloads belonging to a particular category. # Remarks: # The cleanup is performed only in the job directory. Its subdirectories, if any, are not affected. # Extensions are case-insensitive. # Usage syntax: # C:\Python31\python.exe selective_cleanup.py job_directory job_category category_specified ext1 ext2 ... extLast # Usage examples: # C:\Python31\python.exe "D:\SABnzbd scripts\selective_cleanup.py" %1 %5 movies sfv # C:\Python31\python.exe "D:\SABnzbd scripts\selective_cleanup.py" %1 %5 "movies (hd)" sfv nzb # Keywords: # sabnzbd+, sabnzbd, post-processing, post-processing script, # delete, deletion, file deletion, extension, file cleanup, cleanup list import os, sys # Parse input arguments job_dir = sys.argv[1] job_cat = sys.argv[2] cleanup_cat = sys.argv[3] exts = sys.argv[4:] exts = [ext.lower() for ext in exts] os.chdir(job_dir) # Selectively delete files if job_cat == cleanup_cat: files = [i for i in os.listdir(job_dir) if os.path.isfile(i)] files = [f for f in files if os.path.splitext(f)[1][1:].lower() in exts] for f in files: os.remove(f) print('Deleted {}'.format(f))
566
450
<reponame>LiuLeif/onnc<gh_stars>100-1000 //===- ObjectWriter.h -----------------------------------------------------===// // // The ONNC Project // // See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// #ifndef ONNC_CORE_OBJECT_WRITER_H #define ONNC_CORE_OBJECT_WRITER_H #include <onnc/Support/OStream.h> namespace onnc { /** \class onnc::ObjectWriter * \brief provides interfaces for dumping compiler output. * * Deep learning accelerators don't have common standard for the output * file format. Some of them define their own format and some of them use * just memory image as the output format. ObjectWriter tries to provide * a common interface to encapsulate various formats in writing steps. */ class ObjectWriter { public: ObjectWriter(OStream& pOS); virtual ~ObjectWriter() = 0; OStream& getStream() { return *m_pOS; } void setStream(OStream& pOS) { m_pOS = &pOS; } private: OStream* m_pOS; }; } // namespace of onnc #endif
341
303
# basic closure # to write!
9
450
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hawq.ranger.service; import org.apache.hawq.ranger.model.HawqResource; import org.apache.ranger.plugin.service.ResourceLookupContext; import org.apache.ranger.plugin.util.TimedEventUtil; import org.apache.log4j.Logger; import java.sql.SQLException; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import java.util.Collections; public abstract class HawqResourceMgr { private static final Logger LOG = Logger.getLogger(HawqResourceMgr.class); public static List<String> getHawqResources(String serviceName, String serviceType, Map<String, String> configs, ResourceLookupContext context) throws Exception { final String userInput = context.getUserInput(); HawqResource hawqResource = HawqResource.valueOf(context.getResourceName().toUpperCase()); final Map<String, List<String>> resources = context.getResources(); List<String> result = null; if (serviceName != null && userInput != null) { try { if (LOG.isDebugEnabled()) { LOG.debug("==> HawqResourceMgr.getHawqResources() UserInput: " + userInput); } final HawqClient hawqClient = new HawqClient(serviceName, configs); Callable<List<String>> callableObj; switch 
(hawqResource) { case DATABASE: callableObj = new Callable<List<String>>() { @Override public List<String> call() throws SQLException { return hawqClient.getDatabaseList(userInput); } }; break; case TABLESPACE: callableObj = new Callable<List<String>>() { @Override public List<String> call() throws SQLException { return hawqClient.getTablespaceList(userInput); } }; break; case PROTOCOL: callableObj = new Callable<List<String>>() { @Override public List<String> call() throws SQLException { return hawqClient.getProtocolList(userInput); } }; break; case SCHEMA: callableObj = new Callable<List<String>>() { @Override public List<String> call() throws SQLException { return hawqClient.getSchemaList(userInput, resources); } }; break; case LANGUAGE: callableObj = new Callable<List<String>>() { @Override public List<String> call() throws SQLException { return hawqClient.getLanguageList(userInput, resources); } }; break; case TABLE: callableObj = new Callable<List<String>>() { @Override public List<String> call() throws SQLException { return hawqClient.getTableList(userInput, resources); } }; break; case SEQUENCE: callableObj = new Callable<List<String>>() { @Override public List<String> call() throws SQLException { return hawqClient.getSequenceList(userInput, resources); } }; break; case FUNCTION: callableObj = new Callable<List<String>>() { @Override public List<String> call() throws SQLException { return hawqClient.getFunctionList(userInput, resources); } }; break; default: throw new IllegalArgumentException("Resource requested does not exist."); } synchronized (hawqClient) { result = TimedEventUtil.timedTask(callableObj, 5, TimeUnit.SECONDS); } Collections.sort(result); } catch (Exception e) { LOG.error("Unable to get Hawq resources.", e); throw e; } } return result; } }
3,165
476
<gh_stars>100-1000 time_t *msg_list[16]; int menu() { puts(" Message System "); puts("---------------------"); puts("0 : Add a message"); puts("1 : Remove the message"); puts("2 : Show the message"); puts("3 : Change the timestamp"); return puts("---------------------"); } uint64_t add() { int i; // [rsp+Ch] [rbp-24h] int64_t size; // [rsp+10h] [rbp-20h] time_t *message; // [rsp+18h] [rbp-18h] void *buf; // [rsp+20h] [rbp-10h] uint64_t v5; // [rsp+28h] [rbp-8h] printf("Give me the message size : "); __isoc99_scanf("%lld", &size); if ( size > 16 && size <= 4095 ) { message = (time_t *)malloc(size + 8); *message = time(0LL); printf("Give me your meesage : ", &size); buf = message + 1; read(0, message + 1, size - 1); *((char *)buf + size - 1) = 0; for ( i = 0; i <= 15; ++i ) { if ( !msg_list[i] ) { msg_list[i] = message; puts("Done!!"); } } } } uint64_t remove() { int64_t index; // [rsp+0h] [rbp-10h] uint64_t v2; // [rsp+8h] [rbp-8h] printf("Give me index of the message : "); __isoc99_scanf("%lld", &index); if ( index >= 0 && index <= 15 && msg_list[index] ) { free(msg_list[index]); puts("Done!!"); } } uint64_t show() { char *v0; // rax int64_t index; // [rsp+0h] [rbp-10h] uint64_t v3; // [rsp+8h] [rbp-8h] printf("Give me index of the message : "); __isoc99_scanf("%lld", &index); if ( index >= 0 && index <= 15 && msg_list[index] ) { v0 = ctime(msg_list[index]); printf("Time : %sMessage : ", v0); puts((const char *)msg_list[index] + 8); } } uint64_t change_time_stamp() { int64_t index; // [rsp+0h] [rbp-10h] uint64_t v2; // [rsp+8h] [rbp-8h] printf("Give me index of the message : "); __isoc99_scanf("%lld", &index); *msg_list[index] += rand() % 10; puts("Done!!"); } int main() { int64_t *v3; // rsi int64_t v4; // [rsp+0h] [rbp-10h] uint64_t v5; // [rsp+8h] [rbp-8h] setvbuf(stdout, 0LL, 2, 0LL); v3 = 0LL; setvbuf(stdin, 0LL, 2, 0LL); srand(1u); while ( 1 ) { menu(); printf("choice : ", v3); v3 = &v4; __isoc99_scanf("%lld", &v4); if ( v4 ) { switch ( v4 ) { case 1LL: 
remove(); break; case 2LL: show(); break; case 3LL: change_time_stamp(); break; default: puts("Wrong Choice"); break; } } else { add(); } } }
1,579
852
#ifndef HiJetBackground_HiFJRhoProducer_h #define HiJetBackground_HiFJRhoProducer_h // system include files #include <memory> #include <sstream> #include <string> #include <vector> // user include files #include "FWCore/Framework/interface/Frameworkfwd.h" #include "FWCore/Framework/interface/stream/EDProducer.h" #include "FWCore/Framework/interface/Event.h" #include "FWCore/Framework/interface/MakerMacros.h" #include "FWCore/ParameterSet/interface/ParameterSet.h" #include "FWCore/Utilities/interface/StreamID.h" #include "DataFormats/JetReco/interface/Jet.h" // // class declaration // class HiFJRhoProducer : public edm::stream::EDProducer<> { public: explicit HiFJRhoProducer(const edm::ParameterSet&); ~HiFJRhoProducer() override; static void fillDescriptions(edm::ConfigurationDescriptions& descriptions); private: void beginStream(edm::StreamID) override; void produce(edm::Event&, const edm::EventSetup&) override; void endStream() override; double calcMedian(std::vector<double>& v); double calcMd(const reco::Jet* jet); bool isPackedCandidate(const reco::Candidate* candidate); // ----------member data --------------------------- //input edm::EDGetTokenT<edm::View<reco::Jet>> jetsToken_; //members edm::InputTag src_; // input kt jet source unsigned int nExcl_; //Number of leading jets to exclude double etaMaxExcl_; //max eta for jets to exclude double ptMinExcl_; //min pt for excluded jets unsigned int nExcl2_; //Number of leading jets to exclude in 2nd eta region double etaMaxExcl2_; //max eta for jets to exclude in 2nd eta region double ptMinExcl2_; //min pt for excluded jets in 2nd eta region std::vector<double> etaRanges; //eta boundaries for rho calculation regions bool checkJetCand, usingPackedCand; }; #endif
674
648
<reponame>swrobel/fhir {"resourceType":"ValueSet","id":"diet-type","meta":{"lastUpdated":"2015-10-24T07:41:03.495+11:00","profile":["http://hl7.org/fhir/StructureDefinition/valueset-shareable-definition"]},"text":{"status":"generated","div":"<div xmlns=\"http://www.w3.org/1999/xhtml\">\n <h2>Diet Codes</h2>\n <p>DietCode : Codes that can be used to indicate the type of food being ordered for a patient. This value set includes all children of SNOMED CT Concepts (US Extension and Core) from SCTID 182922004 Dietary regime (Therapeutic or Preventive Procedure) and is provided as a suggestive example.</p>\n <p>\n <b>Copyright Statement:</b> This value set includes content from SNOMED CT, which is copyright © 2002+ International Health Terminology Standards Development Organisation (IHTSDO), and distributed by agreement between IHTSDO and HL7. Implementer use of SNOMED CT is not covered by this agreement.\n </p>\n <p>This value set includes codes from the following code systems:</p>\n <ul>\n <li>Include codes from http://snomed.info/sct where concept is-a 182922004</li>\n </ul>\n </div>"},"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/valueset-oid","valueUri":"urn:oid:2.16.840.1.113883.4.642.2.206"}],"url":"http://hl7.org/fhir/ValueSet/diet-type","version":"1.0.2","name":"Diet Codes","status":"draft","experimental":true,"publisher":"FHIR NutritionOrder team","contact":[{"telecom":[{"system":"other","value":"http://hl7.org/fhir"}]}],"date":"2015-10-24T07:41:03+11:00","description":"DietCode : Codes that can be used to indicate the type of food being ordered for a patient. 
This value set includes all children of SNOMED CT Concepts (US Extension and Core) from SCTID 182922004 Dietary regime (Therapeutic or Preventive Procedure) and is provided as a suggestive example.","copyright":"This value set includes content from SNOMED CT, which is copyright © 2002+ International Health Terminology Standards Development Organisation (IHTSDO), and distributed by agreement between IHTSDO and HL7. Implementer use of SNOMED CT is not covered by this agreement.","compose":{"include":[{"system":"http://snomed.info/sct","filter":[{"property":"concept","_property":{"fhir_comments":[" SNOMED CT - children of SNOMED CT Concepts (US Extension and Core) from SCTID 182922004 Dietary regime (Therapeutic or Preventive Procedure) "]},"op":"is-a","value":"182922004"}]}]}}
819
634
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.roots.ui.configuration.actions; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.roots.ContentFolder; import com.intellij.openapi.roots.ui.configuration.ContentEntryEditor; import com.intellij.openapi.roots.ui.configuration.ContentEntryTreeEditor; import com.intellij.openapi.vfs.VirtualFile; import consulo.roots.ContentFolderTypeProvider; import javax.swing.*; /** * @author <NAME> * @since Oct 14, 2003 */ public class ToggleFolderStateAction extends ContentEntryEditingAction { private final ContentEntryTreeEditor myEntryTreeEditor; private final ContentFolderTypeProvider myContentFolderType; public ToggleFolderStateAction(JTree tree, ContentEntryTreeEditor entryEditor, ContentFolderTypeProvider contentFolderType) { super(tree, contentFolderType.getName(), contentFolderType.getIcon()); myEntryTreeEditor = entryEditor; myContentFolderType = contentFolderType; } @Override public boolean displayTextInToolbar() { return true; } @Override public boolean isSelected(final AnActionEvent e) { final VirtualFile[] selectedFiles = getSelectedFiles(); if (selectedFiles.length == 0) return false; final ContentEntryEditor editor = myEntryTreeEditor.getContentEntryEditor(); final ContentFolder folder = editor.getFolder(selectedFiles[0]); return folder != null && folder.getType() == myContentFolderType; } @Override public void setSelected(final 
AnActionEvent e, final boolean isSelected) { final VirtualFile[] selectedFiles = getSelectedFiles(); assert selectedFiles.length != 0; final ContentEntryEditor contentEntryEditor = myEntryTreeEditor.getContentEntryEditor(); for (VirtualFile selectedFile : selectedFiles) { final ContentFolder contentFolder = contentEntryEditor.getFolder(selectedFile); if (isSelected) { if (contentFolder == null) { // not marked yet contentEntryEditor.addFolder(selectedFile, myContentFolderType); } else { if (myContentFolderType.equals(contentFolder.getType())) { contentEntryEditor.removeFolder(contentFolder); contentEntryEditor.addFolder(selectedFile, myContentFolderType); } } } else { if (contentFolder != null) { // already marked contentEntryEditor.removeFolder(contentFolder); } } } } }
938
7,137
package io.onedev.server.web.websocket;

import javax.inject.Inject;
import javax.inject.Singleton;

import io.onedev.commons.loader.Listen;
import io.onedev.server.event.build.BuildEvent;
import io.onedev.server.model.Build;
import io.onedev.server.model.Project;

/**
 * Listens for build events and pushes change notifications to the three
 * web-socket observables that track a build: the build itself, the commit
 * status, and the per-job status.
 */
@Singleton
public class BuildEventBroadcaster {

	private final WebSocketManager webSocketManager;

	@Inject
	public BuildEventBroadcaster(WebSocketManager webSocketManager) {
		this.webSocketManager = webSocketManager;
	}

	@Listen
	public void on(BuildEvent event) {
		Project project = event.getProject();
		Build build = event.getBuild();

		// Observable keyed on the build id itself.
		webSocketManager.notifyObservableChange(Build.getWebSocketObservable(build.getId()));

		// Observable for the overall commit status of this build's commit.
		String commitStatusObservable = "commit-status:" + project.getId() + ":" + build.getCommitHash();
		webSocketManager.notifyObservableChange(commitStatusObservable);

		// Observable for the status of this particular job on the commit.
		String jobStatusObservable = "job-status:" + project.getId() + ":" + build.getCommitHash() + ":" + build.getJobName();
		webSocketManager.notifyObservableChange(jobStatusObservable);
	}

}
345
3,495
/*
 *
 *    Copyright (c) 2020 Project CHIP Authors
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */

#include "TestRetransmit.h"

#include <lib/support/UnitTestRegistration.h>
#include <transport/retransmit/Cache.h>

#include <bitset>

#include <nlunit-test.h>

// Helpers for simple payload management
namespace {

// Payload values used by the tests must stay strictly inside (0, kMaxPayloadValue)
// so they can be tracked as bit positions in IntPayloadTracker.
constexpr int kMaxPayloadValue = 100;

/**
 * Derived cache class containing some test helper methods.
 */
template <typename KeyType, typename PayloadType, size_t N>
class TestableCache : public chip::Retransmit::Cache<KeyType, PayloadType, N>
{
public:
    /**
     * Convenience add when types are trivially copyable, so no actual
     * reference needs to be created.
     */
    template <typename = std::enable_if<std::is_trivially_copyable<PayloadType>::value, int>>
    CHIP_ERROR AddValue(const KeyType & key, PayloadType payload)
    {
        return chip::Retransmit::Cache<KeyType, PayloadType, N>::Add(key, payload);
    }
};

/**
 * Tracks which int payloads are currently "acquired" (owned by the cache).
 * Acquire/Release are driven by the Lifetime<int> specializations below, so
 * the tests can observe exactly when the cache takes and drops ownership.
 */
class IntPayloadTracker
{
public:
    void Init(nlTestSuite * suite) { mSuite = suite; }

    void Acquire(int value)
    {
        NL_TEST_ASSERT(mSuite, (value > 0) && value < kMaxPayloadValue);
        mAquired.set(static_cast<size_t>(value));
    }

    void Release(int value)
    {
        NL_TEST_ASSERT(mSuite, (value > 0) && value < kMaxPayloadValue);
        // Releasing a payload that was never acquired is a test failure.
        NL_TEST_ASSERT(mSuite, mAquired.test(static_cast<size_t>(value)));
        mAquired.reset(static_cast<size_t>(value));
    }

    // Number of payloads currently held by the cache.
    size_t Count() const { return mAquired.count(); }

    bool IsAquired(int value) const { return mAquired.test(static_cast<size_t>(value)); }

private:
    nlTestSuite * mSuite;
    std::bitset<kMaxPayloadValue> mAquired;
};

IntPayloadTracker gPayloadTracker;

/**
 * Helper class defining a matches method for things divisible by a
 * specific value.
 */
class DivisibleBy
{
public:
    DivisibleBy(int value) : mValue(value) {}

    bool Matches(int x) const { return (x % mValue) == 0; }

private:
    const int mValue;
};

} // namespace

// Route int payload acquire/release through the global tracker so tests can
// verify the cache's ownership behavior.
template <>
int chip::Retransmit::Lifetime<int>::Acquire(int & value)
{
    gPayloadTracker.Acquire(value);
    return value;
}

template <>
void chip::Retransmit::Lifetime<int>::Release(int & value)
{
    gPayloadTracker.Release(value);
    value = 0; // make sure it is not used anymore
}

namespace {

// A cache that is never used must never acquire or release payloads.
void TestNoOp(nlTestSuite * inSuite, void * inContext)
{
    // unused address cache should not do any aquire/release at any time
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);
    {
        TestableCache<int, int, 20> test;
        NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);
    }
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);
}

// Items still present at destruction time must be released by the destructor.
void TestDestructorFree(nlTestSuite * inSuite, void * inContext)
{
    {
        TestableCache<int, int, 20> test;

        NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);

        NL_TEST_ASSERT(inSuite, test.AddValue(1, 1) == CHIP_NO_ERROR);
        NL_TEST_ASSERT(inSuite, test.AddValue(2, 2) == CHIP_NO_ERROR);

        NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 2);
    }

    // destructor should release the items
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);
}

// Adding beyond capacity fails with CHIP_ERROR_NO_MEMORY and acquires nothing.
void OutOfSpace(nlTestSuite * inSuite, void * inContext)
{
    {
        TestableCache<int, int, 4> test;

        NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);

        NL_TEST_ASSERT(inSuite, test.AddValue(1, 1) == CHIP_NO_ERROR);
        NL_TEST_ASSERT(inSuite, test.AddValue(2, 2) == CHIP_NO_ERROR);
        NL_TEST_ASSERT(inSuite, test.AddValue(3, 4) == CHIP_NO_ERROR);
        NL_TEST_ASSERT(inSuite, test.AddValue(4, 6) == CHIP_NO_ERROR);
        NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 4);

        NL_TEST_ASSERT(inSuite, test.AddValue(5, 8) == CHIP_ERROR_NO_MEMORY);
        NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 4);

        NL_TEST_ASSERT(inSuite, test.AddValue(6, 10) == CHIP_ERROR_NO_MEMORY);
        NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 4);
    }
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);
}

// Interleaved add/remove: removal frees a slot for a subsequent add, and
// removing a missing key reports CHIP_ERROR_KEY_NOT_FOUND without side effects.
void AddRemove(nlTestSuite * inSuite, void * inContext)
{
    TestableCache<int, int, 3> test;

    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);

    NL_TEST_ASSERT(inSuite, test.AddValue(1, 1) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.AddValue(2, 2) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.AddValue(3, 4) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 3);

    NL_TEST_ASSERT(inSuite, test.AddValue(10, 8) == CHIP_ERROR_NO_MEMORY);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 3);

    NL_TEST_ASSERT(inSuite, test.Remove(2) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 2);

    NL_TEST_ASSERT(inSuite, test.AddValue(10, 8) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 3);

    NL_TEST_ASSERT(inSuite, test.Remove(14) == CHIP_ERROR_KEY_NOT_FOUND);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 3);

    NL_TEST_ASSERT(inSuite, test.Remove(1) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 2);

    NL_TEST_ASSERT(inSuite, test.Remove(3) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 1);

    NL_TEST_ASSERT(inSuite, test.Remove(3) == CHIP_ERROR_KEY_NOT_FOUND);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 1);

    NL_TEST_ASSERT(inSuite, test.Remove(10) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);

    NL_TEST_ASSERT(inSuite, test.Remove(10) == CHIP_ERROR_KEY_NOT_FOUND);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);
}

// RemoveMatching applies the predicate to KEYS: entries with keys 2 and 4 are
// removed, releasing their payloads (2 and 8).
void RemoveMatching(nlTestSuite * inSuite, void * inContext)
{
    TestableCache<int, int, 4> test;

    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);

    NL_TEST_ASSERT(inSuite, test.AddValue(1, 1) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.AddValue(2, 2) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.AddValue(3, 4) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.AddValue(4, 8) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 4);

    test.RemoveMatching(DivisibleBy(2));
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 2);

    // keys 1 and 3 remain (holding payloads 1 and 4 respectively)
    NL_TEST_ASSERT(inSuite, gPayloadTracker.IsAquired(1));
    NL_TEST_ASSERT(inSuite, gPayloadTracker.IsAquired(4));

    NL_TEST_ASSERT(inSuite, test.Remove(3) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.IsAquired(1));
    NL_TEST_ASSERT(inSuite, !gPayloadTracker.IsAquired(4));
}

// Find searches by KEY predicate and reports key/value pairs; a failed find
// must null out both output pointers.
void FindMatching(nlTestSuite * inSuite, void * inContext)
{
    TestableCache<int, int, 4> test;

    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 0);

    NL_TEST_ASSERT(inSuite, test.AddValue(1, 1) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.AddValue(2, 2) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.AddValue(3, 4) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.AddValue(4, 8) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, gPayloadTracker.Count() == 4);

    const int * key;
    const int * value;

    NL_TEST_ASSERT(inSuite, test.Find(DivisibleBy(20), &key, &value) == false);
    NL_TEST_ASSERT(inSuite, key == nullptr);
    NL_TEST_ASSERT(inSuite, value == nullptr);

    // This relies on linear add. May need changing if implementation changes
    NL_TEST_ASSERT(inSuite, test.Find(DivisibleBy(2), &key, &value) == true);
    NL_TEST_ASSERT(inSuite, *key == 2);
    NL_TEST_ASSERT(inSuite, *value == 2);

    NL_TEST_ASSERT(inSuite, test.Remove(*key) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.Find(DivisibleBy(2), &key, &value) == true);
    NL_TEST_ASSERT(inSuite, *key == 4);
    NL_TEST_ASSERT(inSuite, *value == 8);

    NL_TEST_ASSERT(inSuite, test.Remove(*key) == CHIP_NO_ERROR);
    NL_TEST_ASSERT(inSuite, test.Find(DivisibleBy(2), &key, &value) == false);
    NL_TEST_ASSERT(inSuite, key == nullptr);
    NL_TEST_ASSERT(inSuite, value == nullptr);
}

} // namespace

// clang-format off
static const nlTest sTests[] =
{
    NL_TEST_DEF("NoOp", TestNoOp),
    NL_TEST_DEF("DestructorFree", TestDestructorFree),
    NL_TEST_DEF("OutOfSpace", OutOfSpace),
    NL_TEST_DEF("AddRemove", AddRemove),
    NL_TEST_DEF("RemoveMatching", RemoveMatching),
    NL_TEST_DEF("FindMatching", FindMatching),
    NL_TEST_SENTINEL()
};
// clang-format on

// Test suite entry point; returns the number of failed tests.
int TestCache(void)
{
    nlTestSuite theSuite = { "Retransmit-Cache", &sTests[0], nullptr, nullptr };

    gPayloadTracker.Init(&theSuite);
    nlTestRunner(&theSuite, nullptr);
    return nlTestRunnerStats(&theSuite);
}

CHIP_REGISTER_TEST_SUITE(TestCache)
3,892
419
<reponame>JuanluMorales/KRG #include "AIBehavior.h" #include "Engine/Animation/Graph/Animation_RuntimeGraph_Controller.h" #include "Engine/Physics/Components/Component_PhysicsCharacter.h" #include "Game/Core/Player/Components/Component_MainPlayer.h" //------------------------------------------------------------------------- namespace KRG::AI { BehaviorContext::~BehaviorContext() { KRG_ASSERT( m_pEntityWorldUpdateContext == nullptr && m_pNavmeshSystem == nullptr && m_pPhysicsScene == nullptr ); KRG_ASSERT( m_pCharacter == nullptr && m_pCharacterController == nullptr ); KRG_ASSERT( m_pAIComponent == nullptr && m_pAnimationController == nullptr ); } bool BehaviorContext::IsValid() const { if ( m_pAIComponent == nullptr ) { return false; } if ( m_pCharacter == nullptr || m_pCharacterController == nullptr || !m_pCharacter->IsRootComponent() ) { return false; } if ( m_pAnimationController == nullptr || !m_pAnimationController->HasSubGraphControllers() ) { return false; } return m_pEntityWorldUpdateContext != nullptr && m_pNavmeshSystem != nullptr && m_pPhysicsScene != nullptr; } }
486
1,585
/*
 * Copyright (C) by Argonne National Laboratory
 *     See COPYRIGHT in top-level directory
 */

#ifndef MPL_TIMER_MACH_ABSOLUTE_TIME_H_INCLUDED
#define MPL_TIMER_MACH_ABSOLUTE_TIME_H_INCLUDED

/* For this timer backend, MPL wtime is provided as a real function rather
 * than a macro. */
#define MPLI_WTIME_IS_A_FUNCTION

/* mach_absolute_time() and related Mach timebase APIs (Darwin/macOS). */
#include <mach/mach_time.h>

#endif /* MPL_TIMER_MACH_ABSOLUTE_TIME_H_INCLUDED */
143
2,860
# coding: utf-8

"""
    Kubeflow Pipelines API

    This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition.

    Contact: <EMAIL>
    Generated by: https://openapi-generator.tech
"""


from __future__ import absolute_import

import unittest
import datetime

import kfp_server_api
from kfp_server_api.models.api_periodic_schedule import ApiPeriodicSchedule  # noqa: E501
from kfp_server_api.rest import ApiException


class TestApiPeriodicSchedule(unittest.TestCase):
    """ApiPeriodicSchedule unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build an ApiPeriodicSchedule test instance.

        include_optional is a boolean: when False only required params are
        included, when True both required and optional params are included.
        """
        # model = kfp_server_api.models.api_periodic_schedule.ApiPeriodicSchedule()  # noqa: E501
        if include_optional :
            return ApiPeriodicSchedule(
                start_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                end_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                interval_second = '0'
            )
        else :
            return ApiPeriodicSchedule(
        )

    def testApiPeriodicSchedule(self):
        """Test ApiPeriodicSchedule"""
        # NOTE(review): autogenerated construction-only smoke test — it only
        # verifies that instantiation does not raise; no assertions are made.
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)


if __name__ == '__main__':
    unittest.main()
751
1,443
{ "copyright": "<NAME>", "url": "http://manparvesh.com", "email": "<EMAIL>", "theme": "material-teal", "gravatar": true }
58
363
package com.ess.filepicker.widget;

import android.graphics.Rect;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
import android.view.View;

import com.ess.filepicker.util.UiUtils;

/**
 * MediaItemDecoration — adds a 4dp divider inset to grid items, skipping the
 * right inset on the last column and the bottom inset on the last row.
 * Created by 李波 on 2018/3/2.
 */
public class MediaItemDecoration extends RecyclerView.ItemDecoration {

    /** Returns the grid's column count, or -1 for non-grid layout managers. */
    private int getSpanCount(RecyclerView parent) {
        // number of columns
        int spanCount = -1;
        RecyclerView.LayoutManager layoutManager = parent.getLayoutManager();
        if (layoutManager instanceof GridLayoutManager) {
            spanCount = ((GridLayoutManager) layoutManager).getSpanCount();
        } else if (layoutManager instanceof StaggeredGridLayoutManager) {
            spanCount = ((StaggeredGridLayoutManager) layoutManager)
                    .getSpanCount();
        }
        return spanCount;
    }

    @Override
    public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
        int itemPosition = parent.getChildAdapterPosition(view);
        int divider = UiUtils.dpToPx(parent.getContext(), 4);
        int spanCount = getSpanCount(parent);
        int childCount = parent.getAdapter().getItemCount();
        // NOTE(review): last-row is tested before last-column, so a cell that is
        // both (bottom-right corner) still gets a right inset — confirm intended.
        if (isLastRaw(parent, itemPosition, spanCount, childCount)) {
            // last row: right inset only, no bottom
            outRect.set(0, 0, divider, 0);
        } else if (isLastColum(parent, itemPosition, spanCount, childCount)) {
            // last column: bottom inset only, no right
            outRect.set(0, 0, 0, divider);
        } else {
            outRect.set(0, 0, divider, divider);
        }
    }

    /** True when the item at {@code pos} sits in the grid's last column. */
    private boolean isLastColum(RecyclerView parent, int pos, int spanCount,
                                int childCount) {
        RecyclerView.LayoutManager layoutManager = parent.getLayoutManager();
        if (layoutManager instanceof GridLayoutManager) {
            if ((pos + 1) % spanCount == 0)// if it's the last column, no right divider is needed
            {
                return true;
            }
        } else if (layoutManager instanceof StaggeredGridLayoutManager) {
            int orientation = ((StaggeredGridLayoutManager) layoutManager)
                    .getOrientation();
            if (orientation == StaggeredGridLayoutManager.VERTICAL) {
                if ((pos + 1) % spanCount == 0)// if it's the last column, no right divider is needed
                {
                    return true;
                }
            } else {
                childCount = childCount - childCount % spanCount;
                if (pos >= childCount)// if it's the last column, no right divider is needed
                    return true;
            }
        }
        return false;
    }

    /** True when the item at {@code pos} sits in the grid's last row. */
    private boolean isLastRaw(RecyclerView parent, int pos, int spanCount,
                              int childCount) {
        RecyclerView.LayoutManager layoutManager = parent.getLayoutManager();
        if (layoutManager instanceof GridLayoutManager) {
            childCount = childCount - childCount % spanCount;
            if (pos >= childCount)// if it's the last row, no bottom divider is needed
                return true;
        } else if (layoutManager instanceof StaggeredGridLayoutManager) {
            int orientation = ((StaggeredGridLayoutManager) layoutManager)
                    .getOrientation();
            // StaggeredGridLayoutManager scrolling vertically
            if (orientation == StaggeredGridLayoutManager.VERTICAL) {
                childCount = childCount - childCount % spanCount;
                // if it's the last row, no bottom divider is needed
                if (pos >= childCount)
                    return true;
            } else
            // StaggeredGridLayoutManager scrolling horizontally
            {
                // if it's the last row, no bottom divider is needed
                if ((pos + 1) % spanCount == 0) {
                    return true;
                }
            }
        }
        return false;
    }
}
1,935
342
package com.alibaba.chaosblade.exec.plugin.servlet.code;

import com.alibaba.chaosblade.exec.common.model.FlagSpec;

/**
 * Flag specification for the servlet plugin's "code" flag, used to target an
 * HTTP response code in chaos experiments.
 *
 * @author yefei
 */
public class HttpCodeFlagSpec implements FlagSpec {

    /** Flag name as used on the command line. */
    @Override
    public String getName() {
        return "code";
    }

    /** Human-readable description shown in help output. */
    @Override
    public String getDesc() {
        return "http code";
    }

    /** The flag takes an argument (the code value). */
    @Override
    public boolean noArgs() {
        return false;
    }

    /** The flag is optional. */
    @Override
    public boolean required() {
        return false;
    }
}
258
310
/*! * This file is part of GPBoost a C++ library for combining * boosting with Gaussian process and mixed effects models * * Copyright (c) 2020 <NAME>. All rights reserved. * * Licensed under the Apache License Version 2.0 See LICENSE file in the project root for license information. */ #ifndef GPBOOST_METRIC_RANDOM_EFFECTS_METRIC_HPP_ #define GPBOOST_METRIC_RANDOM_EFFECTS_METRIC_HPP_ #include <LightGBM/metric.h> #include <LightGBM/utils/log.h> #include <GPBoost/re_model.h> #include <string> #include <algorithm> #include <cmath> #include <vector> namespace LightGBM { /*! * \brief Metric when having a random effects model (re_model) for Gaussian data */ class NegLogLikelihood : public Metric { public: explicit NegLogLikelihood(const Config& config) :config_(config) { } virtual ~NegLogLikelihood() { } const std::vector<std::string>& GetName() const override { return name_; } double factor_to_bigger_better() const override { return -1.0f; } void Init(const Metadata&, data_size_t) override { if (!metric_for_train_data_) { Log::Fatal("The metric 'neg_log_likelihood' cannot be used for validation data, it can only be used for training data"); } } std::vector<double> Eval(const double* score, const ObjectiveFunction* objective) const override { double loss; if (metric_for_train_data_) { REModel* re_model = objective->GetGPModel(); re_model->EvalNegLogLikelihood(nullptr, nullptr, loss, score, false, false); } else { //loss = std::numeric_limits<double>::quiet_NaN();//gives an error loss = 0; } return std::vector<double>(1, loss); } private: /*! \brief Name of this metric */ std::vector<std::string> name_ = { "Negative log-likelihood" }; Config config_; }; /*! 
* \brief Metric when having a random effects model (re_model) for non-Gaussian data and inference is done using the Laplace approximation */ class LatenGaussianLaplace : public Metric { public: explicit LatenGaussianLaplace(const Config& config) :config_(config) { } virtual ~LatenGaussianLaplace() { } const std::vector<std::string>& GetName() const override { return name_; } double factor_to_bigger_better() const override { return -1.0f; } void Init(const Metadata&, data_size_t) override { if (!metric_for_train_data_) { Log::Fatal("The metric 'neg_log_likelihood' cannot be used for validation data, it can only be used for training data"); } } std::vector<double> Eval(const double* score, const ObjectiveFunction* objective) const override { double loss; if (metric_for_train_data_) { REModel* re_model = objective->GetGPModel(); re_model->EvalNegLogLikelihood(nullptr, nullptr, loss, score, false, false); } else { //loss = std::numeric_limits<double>::quiet_NaN();//gives an error loss = 0; } return std::vector<double>(1, loss); } private: /*! \brief Name of this metric */ std::vector<std::string> name_ = { "Approx. negative marginal log-likelihood" }; Config config_; }; } // namespace LightGBM #endif // GPBOOST_METRIC_RANDOM_EFFECTS_METRIC_HPP_
1,157