code stringlengths 0-29.6k | language stringclasses 9 values | AST_depth int64 3-30 | alphanumeric_fraction float64 0.2-0.86 | max_line_length int64 13-399 | avg_line_length float64 5.02-139 | num_lines int64 7-299 | source stringclasses 4 values |
---|---|---|---|---|---|---|---|
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using FluentAssertions;
using NiallVR.Senko.Core.Locks;
using Xunit;
namespace Senko.Core.Tests.Unit.Locks;
public class SemaphoreQueueTests {
private readonly SemaphoreQueue _systemUnderTest = new(1);
[Fact(Timeout = 3000)]
public async Task WaitAsync_Should_ReturnATaskWhichComplete_When_NextInLine() {
// Arrange
var resultList = new List<int>();
async Task AddToResultsList(int index) {
await _systemUnderTest.WaitAsync();
resultList.Add(index);
_systemUnderTest.Release();
}
// Act
await Task.WhenAll(Enumerable.Range(0, 20).Select(AddToResultsList));
// Assert
resultList.Should().BeInAscendingOrder();
}
} | c# | 16 | 0.66586 | 83 | 27.517241 | 29 | starcoderdata |
package com.sutromedia.android.lib.util;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
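// Small utility for copying the contents of an InputStream to an OutputStream.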
public final class UtilStream {
private static final int BUFFER_SIZE = 1024;
public static void copy(
final InputStream in,
final OutputStream out,
final int bufferSize) throws IOException {
byte[] buffer = new byte[bufferSize];
int length;
while ((length = in.read(buffer)) != -1) {
out.write(buffer, 0, length);
}
in.close();
}
public static void copy(
final InputStream in,
final OutputStream out) throws IOException {
copy(in, out, BUFFER_SIZE);
}
} | java | 12 | 0.597015 | 52 | 22.566667 | 30 | starcoderdata |
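// callExternal sends the DNS query to the upstream resolver over the requested protocol,
// falling back from TCP to UDP when the TCP connection cannot be dialed.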
func (r *dnsUpstreamClient) callExternal(msg *dns.Msg,
upstreamURL string, protocol RequestProtocol) (response *dns.Msg, rtt time.Duration, err error) {
if protocol == TCP {
response, rtt, err = r.tcpClient.Exchange(msg, upstreamURL)
if err != nil {
// try UDP as fallback
if t, ok := err.(*net.OpError); ok {
if t.Op == "dial" {
return r.udpClient.Exchange(msg, upstreamURL)
}
}
}
return response, rtt, err
}
if r.udpClient != nil {
return r.udpClient.Exchange(msg, upstreamURL)
}
return r.tcpClient.Exchange(msg, upstreamURL)
} | go | 16 | 0.668421 | 98 | 24.954545 | 22 | inline |
package com.patwolohan.FodlaFX.playlist;
import java.io.IOException;
import com.patwolohan.FodlaFX.App;
import com.patwolohan.FodlaFX.MenuController;
import com.patwolohan.FodlaFX.data.DataManagerSQLite;
import com.patwolohan.FodlaFX.utils.DateUtil;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.layout.AnchorPane;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.scene.control.Label;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextArea;
public class PlaylistFXBrowserController {
// Scene Builder fx:id associated with controls on the fxml definition
// The UI components
@FXML
private TableView<Playlist> playlistTable;
@FXML
private TableColumn<Playlist, Number> idColumn;
@FXML
private TableColumn<Playlist, String> titleColumn;
@FXML
private TableColumn<Playlist, String> descriptionColumn;
@FXML
private TableColumn<Playlist, String> themeColumn;
@FXML
private Label IDLabel;
@FXML
private Label titleLabel;
@FXML
private Label descriptionLabel;
@FXML
private Label themeLabel;
// The App
private App theApp;
// Data
private PlaylistDAO model;
// Message box
TextArea aTextArea;
// Playlist
Playlist currentlySelectedPlaylist;
// in the PlaylistDataView.fxml we have set the controller to use so this
// controller is instantiated when we load the .fxml version of the screen
// once this constructor is called next initialize is called
/**
* Constructor
*/
public PlaylistFXBrowserController() {
}
/**
* Called after constructor by .fxml loader.
*/
@FXML
private void initialize() {
// TABLE
// Initialize the playlist table with the two columns.
idColumn.setCellValueFactory(cellData -> cellData.getValue().playlistIDProperty());
titleColumn.setCellValueFactory(cellData -> cellData.getValue().playlistTitleProperty());
descriptionColumn.setCellValueFactory(cellData -> cellData.getValue().playlistDescriptionProperty());
themeColumn.setCellValueFactory(cellData -> cellData.getValue().playlistThemeProperty());
// RECORD
showPlaylistDetails(null);
// EVENTS
// listen for clicks on table rows - using lambda function
playlistTable.getSelectionModel().selectedItemProperty()
.addListener((observable, oldValue, newValue) -> showPlaylistDetails(newValue));
}
/**
* Set Model
*
* called from the MenuController to associate the model with this controller
*/
public void setModel(PlaylistDAO model) {
this.model = model;
// Add observable list data to the table
playlistTable.setItems(model.getAllPlaylist());
}
/**
* set parent application so can get the primary stage.
*/
public void setApp(App theApp) {
this.theApp = theApp;
}
/**
* set messagebox.
*/
public void setMessagebox(TextArea aTextArea) {
this.aTextArea = aTextArea;
}
/**
* display playlist details.
*/
public void showPlaylistDetails(Playlist playlist) {
if (playlist != null) {
IDLabel.setText(Integer.toString(playlist.getPlaylistID()));
titleLabel.setText(playlist.getPlaylistTitle());
descriptionLabel.setText(playlist.getPlaylistDescription());
themeLabel.setText(playlist.getPlaylistTheme());
// so we know the playlist in focus for a delete or edit
this.currentlySelectedPlaylist = playlist;
} else {
IDLabel.setText("-");
titleLabel.setText("-");
descriptionLabel.setText("-");
themeLabel.setText("-");
}
}
/**
* edit button.
*/
@FXML
private void handleEditPlaylist() {
Playlist selectedPlaylist = playlistTable.getSelectionModel().getSelectedItem();
// Check a selection has been made
if (selectedPlaylist != null) {
// here would display edit playlist panel and do database update
this.aTextArea.setText("Edit playlist button pressed");
} else {
// Nothing selected.
Alert alert = new Alert(AlertType.WARNING);
alert.initOwner(theApp.getPrimaryStage());
alert.setTitle("No Selection");
alert.setHeaderText("No playlist Selected");
alert.setContentText("Please select playlist in the table.");
alert.showAndWait();
}
}
/**
* new button.
*/
@FXML
private void handleNewPlaylist() {
// here would display new Playlist panel and do database insert
this.aTextArea.setText("New Playlist button pressed");
}
/**
* delete button.
*/
@FXML
private void handleDeletePlaylist() {
int selectedIndex = playlistTable.getSelectionModel().getSelectedIndex();
// if nothing left in list
if (selectedIndex >= 0) {
// does not delete in database only removes from the list
playlistTable.getItems().remove(selectedIndex);
} else {
// Nothing selected.
Alert alert = new Alert(AlertType.WARNING);
alert.initOwner(theApp.getPrimaryStage());
alert.setTitle("No Selection");
alert.setHeaderText("No Playlist Selected");
alert.setContentText("Please select Playlist in the table.");
alert.showAndWait();
}
}
} | java | 12 | 0.739707 | 103 | 24.246305 | 203 | starcoderdata |
using System.Collections.Generic;
using Entitas.Generics;
namespace Entitas.MatchLine
{
// NOTE: the generic type arguments were stripped during extraction; List<int> is assumed for the value type.
public sealed class ScoringTableComponent : IValueComponent<List<int>>, IUniqueComponent
{
public List<int> Value { get; set; }
}
} | c# | 9 | 0.743682 | 92 | 24.272727 | 11 | starcoderdata |
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
namespace TDAmeritrade.Services.PriceHistory.Models
{
public class CandleList
{
[JsonProperty("candles")]
public IList<Candle> Candles { get; set; } // element type assumed; the generic argument was stripped during extraction
[JsonProperty("empty")]
public bool Empty { get; set; }
[JsonProperty("symbol")]
public string Symbol { get; set; }
}
} | c# | 11 | 0.636816 | 51 | 21.388889 | 18 | starcoderdata |
<?php
namespace Pagevamp\Resizer;
use Illuminate\Support\Facades\Storage;
use Pagevamp\Uploader\UploadableInterface;
use Pagevamp\Uploader\Uploader;
class ResizedImage implements UploadableInterface
{
private $resizedImage;
private $originalImage;
private $uploader;
private $sizeName;
private $sizeRatio;
private $name;
private $uploadPath;
public function __construct($originalImage, $resizedImage, $sizeName, $sizeRatio)
{
$this->sizeName = $sizeName;
$this->originalImage = $originalImage;
$this->resizedImage = $resizedImage;
$this->name = $this->originalImage->getName();
$this->uploadPath = $sizeName;
$this->sizeRatio = $sizeRatio;
}
public function setName($name)
{
$this->name = $name;
}
public function getSizeName()
{
return $this->sizeName;
}
public function getSizeRatio()
{
return $this->sizeRatio;
}
public function getName()
{
return $this->name;
}
public function getContent()
{
return $this->resizedImage->stream();
}
public function getPath()
{
return $this->uploadPath;
}
public function upload(): Uploader
{
$this->uploader = (new Uploader($this, app(Storage::class), $this->getPath()))->upload();
return $this->uploader;
}
public function getUploadedfileUrl()
{
return $this->uploader->getUploadedfileUrl();
}
public function getRelativePath()
{
return $this->uploader->getRelativePath();
}
} | php | 16 | 0.616532 | 97 | 19.628205 | 78 | starcoderdata |
TEST_P(PpnAddEgressRequest, TestPpnRequest) {
AddEgressRequest request;
// Use the actual crypto utils to ensure the base64 encoded strings are sent
// in Json requests.
crypto::SessionCrypto crypto(&config_);
auto keys = crypto.GetMyKeyMaterial();
HttpResponse response;
response.set_json_body(R"json({"jwt": "some_jwt_token"})json");
auto auth_response = std::make_shared<AuthAndSignResponse>();
KryptonConfig config;
config.add_copper_hostname_suffix("g-tun.com");
EXPECT_OK(auth_response->DecodeFromProto(response, config));
AddEgressRequest::PpnDataplaneRequestParams params;
params.auth_response = auth_response;
params.crypto = &crypto;
params.copper_control_plane_address = kCopperControlPlaneAddress;
params.dataplane_protocol = KryptonConfig::BRIDGE;
params.suite = ppn::PpnDataplaneRequest::AES128_GCM;
params.is_rekey = false;
if (GetParam()) {
// Blind signing is enabled.
params.blind_token_enabled = true;
params.blind_message = "raw message";
params.unblinded_token_signature = "raw message signature";
} else {
params.blind_token_enabled = false;
}
params.region_token_and_signature = "raw region and sig";
params.apn_type = "ppn";
auto http_request = request.EncodeToProtoForPpn(params);
EXPECT_TRUE(http_request);
Json::Value actual;
Json::Reader reader;
reader.parse(http_request.value().json_body(), actual);
// Round-tripping through serialization causes int values to randomly be int
// or uint, so we need to test each value separately.
if (GetParam()) {
EXPECT_EQ(actual["unblinded_token"], "raw message");
EXPECT_EQ(actual["unblinded_token_signature"], "raw message signature");
EXPECT_EQ(actual["is_unblinded_token"], true);
} else {
EXPECT_EQ(actual["unblinded_token"], "some_jwt_token");
}
EXPECT_EQ(actual["region_token_and_signature"], "raw region and sig");
EXPECT_EQ(actual["ppn"]["apn_type"], "ppn");
EXPECT_EQ(actual["ppn"]["client_public_value"], keys.public_value);
EXPECT_EQ(actual["ppn"]["client_nonce"], keys.nonce);
EXPECT_EQ(actual["ppn"]["control_plane_sock_addr"],
absl::StrCat(kCopperControlPlaneAddress, ":1849"));
EXPECT_EQ(actual["ppn"]["downlink_spi"].asUInt(), crypto.downlink_spi());
EXPECT_EQ(actual["ppn"]["suite"], "AES128_GCM");
EXPECT_EQ(actual["ppn"]["dataplane_protocol"], "BRIDGE");
EXPECT_EQ(actual["ppn"]["rekey_verification_key"],
crypto.GetRekeyVerificationKey().ValueOrDie());
} | c++ | 12 | 0.704655 | 78 | 39.868852 | 61 | inline |
#ifndef COMMON_H
#define COMMON_H
#include "static.h"
/*
When using gSPVertex w/ ref.fifo, n (1-64) and v0 (0-63)
In regular (fifo), n (1-32) and v0 (0-31)
*/
/* Symbols used by spec */
#define STATIC_SEGMENT 1
#define CFB_SEGMENT 2
/* RDP FIFO output buffer len (min. 0x410 for fifo, 0x600 for ref.fifo) */
#define RDP_OUTPUT_LEN ((1024 * 4) * 16)
/* Thread stack */
#define STACK_SIZE 0x2000
/* Threads */
#define THRD_ID_MAX 12
#define THRD_IDLE_ID OS_PRIORITY_IDLE
#define THRD_MAIN_ID (THRD_IDLE_ID + 1)
/* Screen width & height */
#define SCREEN_W 320
#define SCREEN_H 240
/* Macro for 32-bit RGBA color */
#define GPACK_RGBA5551_32(r, g, b, a) \
(GPACK_RGBA5551(r, g, b, a) << 16 | GPACK_RGBA5551(r, g, b, a))
/* Length of Gfx list */
#define GFXLIST_LEN (1024 * 2)
/* Non-mipmapped textxure setup */
#define GFX_TEX_RGBA(tex, comb) \
gsDPPipeSync(), \
gsDPSetCombineMode((comb), (comb)), \
gsDPSetTexturePersp(G_TP_PERSP), \
gsDPSetTextureLOD(G_TL_TILE), \
gsDPSetTextureFilter(G_TF_BILERP), \
gsDPSetTextureConvert(G_TC_FILT), \
gsDPSetTextureLUT(G_TT_NONE), \
gsDPLoadTextureBlock((tex), G_IM_FMT_RGBA, G_IM_SIZ_16b, 16, 16, 0, \
G_TX_WRAP | G_TX_NOMIRROR, G_TX_WRAP | G_TX_NOMIRROR, \
G_TX_NOMASK, G_TX_NOMASK, G_TX_NOLOD, G_TX_NOLOD), \
/* This structure holds the things which change per frame. It is advantageous
to keep dynamic data together so that we may selectively write back dirty
data cache lines to DRAM prior to processing by the RCP */
typedef struct {
Mtx model, view, projection; /* MVP matrices */
Gfx gfxlist[GFXLIST_LEN]; /* Gfx list for the RDP */
} DynamicGfx;
extern DynamicGfx dyn_gfx;
#ifdef _LANGUAGE_C /* Needed because spec includes this file */
/* RSP matrix stack */
extern u64 dram_stack[];
/* RDP microcode FIFO command buffer */
extern u64 rdp_output[];
/* Color frame buffer */
extern u16 cfb[][SCREEN_W * SCREEN_H];
/* RSP color frame buffer */
extern u16 rsp_cfb[];
/* Z buffer */
extern u16 zbuffer[];
#endif
#endif | c | 7 | 0.655498 | 77 | 25.179487 | 78 | starcoderdata |
<?php
/**
* Created by PhpStorm.
* User: alex
* Date: 02.01.19
* Time: 1:46
*/
namespace backend\helper;
use backend\models\Portfolio;
use backend\models\Post;
use backend\models\Settings;
use backend\models\User;
/**
* Class HelperImageFolder
* @package backend\helper
*/
class HelperImageFolder
{
private $pattern = '/\.(jpg)|(jpeg)|(bmp)|(png)/';
/**
* @var string
*/
private $path_static;
/**
* @var array
*/
public $array_image;
/**
* @var string
*/
public $path;
/**
* HelperImageFolder constructor.
*/
public function __construct()
{
$this->path = \Yii::$app->basePath . '/web/images/';
/*transfer variable to view in any controller*/
$this->array_image = $this->getTrashArrayPhoto();
$this->path_static = \Yii::$app->basePath . '/web/images/staticimg/';
}
/**
* @param $str
* @return string|string[]|null
*/
public static function removeImgTags($str)
{
// The original pattern literal was stripped during extraction; an equivalent img-tag pattern is assumed here.
return preg_replace('#<img[^>]*>#i', '', $str);
}
/**
* Builds the list of image files on disk that are no longer referenced by any portfolio, post or user.
* @return array
*/
private function getTrashArrayPhoto()
{
$imgportfolio = [];
$imgpost = [];
$imguser = [];
$imgpostthumb = [];
$onlyimg = [];
$allimg = scandir($this->path);
$portfolio = Portfolio::find()->all();
$posts = Post::find()->all();
$users = User::find()->all();
foreach ($allimg as $img) {
if (preg_match($this->pattern, $img)) {
$onlyimg[] = $img;
}
}
foreach ($portfolio as $img) {
$imgs = explode(',', $img->img);
foreach ($imgs as $imgitem) {
$imgportfolio[] = basename($imgitem);
}
}
foreach ($posts as $post_img) {
$imgpost[] = basename($post_img->img);
$imgpostthumb[] = basename($post_img->thumb_img);
$allimginpost = array_merge($imgpost, $imgpostthumb);
}
foreach ($users as $user) {
$imguser[] = basename($user->user_img);
}
$global_array_img = array_merge($imgportfolio, $allimginpost, $imguser);
$delete_img = array_diff($onlyimg, $global_array_img);
return $delete_img;
}
/**
* @return string
*/
public function deleteTrashImg()
{
$delete_img = $this->getTrashArrayPhoto();
if ($delete_img) {
foreach ($delete_img as $img) {
$file_delete = $this->path . $img;
if (file_exists($file_delete)) {
unlink($file_delete);
\Yii::$app->session->setFlash('success', 'Old images have been deleted');
}
}
}
}
/**
* @return array
*/
public function staticFolderImage()
{
$get_all_img = scandir($this->path_static);
$onlyimgs = [];
foreach ($get_all_img as $img) {
if (preg_match($this->pattern, $img) && filesize($this->path_static . $img) > Settings::get(Settings::FILESIZE_FILE_COMPRESSION)) {
$onlyimgs[] = $this->path_static . $img;
}
}
return $onlyimgs;
}
/**
* @return array
*/
public function getAllImages()
{
$dir_images = scandir($this->path);
$dir_static_img = scandir($this->path_static);
$img_src = [];
$img_src_static = [];
setlocale(LC_ALL, 'ru_RU', 'ru_RU.UTF-8', 'ru', 'russian');
foreach ($dir_images as $img) {
if (preg_match($this->pattern, $img)) {
$img_src['/admin/images/' . $img] = strftime("%B %d, %Y", filemtime(\Yii::$app->basePath . '/web/images/' . $img));
}
}
arsort($img_src);
foreach ($dir_static_img as $img) {
if (preg_match($this->pattern, $img)) {
$img_src_static['/admin/images/staticimg/' . $img] = strftime("%B %d, %Y", filemtime(\Yii::$app->basePath . '/web/images/staticimg/' . $img));
}
}
arsort($img_src_static);
return array_merge($img_src, $img_src_static);
}
} | php | 26 | 0.502971 | 164 | 24.899408 | 169 | starcoderdata |
package nikita488.zycraft.init;
import com.tterrag.registrate.Registrate;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.text.IFormattableTextComponent;
import net.minecraft.util.text.TextFormatting;
import net.minecraft.util.text.TranslationTextComponent;
import nikita488.zycraft.ZYCraft;
public class ZYLang
{
private static final Registrate REGISTRATE = ZYCraft.registrate();
public static final IFormattableTextComponent SHIFT = REGISTRATE.addLang("tooltip", ZYCraft.id("shortcut"), "shift", "Shift")
.withStyle(TextFormatting.GREEN);
public static final IFormattableTextComponent RIGHT_CLICK = REGISTRATE.addLang("tooltip", ZYCraft.id("shortcut"), "right_click", "Right click")
.withStyle(TextFormatting.GREEN);
public static final IFormattableTextComponent SHIFT_RIGHT_CLICK = REGISTRATE.addLang("tooltip", ZYCraft.id("shortcut"), "shift_right_click", "Shift + Right click")
.withStyle(TextFormatting.GREEN);
public static final IFormattableTextComponent TOOLTIP_HINT = addLangWithArgs("tooltip", ZYCraft.id("hint"), "Press %s for info", SHIFT)
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent CREATIVE_ONLY = REGISTRATE.addLang("tooltip", ZYCraft.id("creative_only"), "Creative-only")
.withStyle(TextFormatting.ITALIC, TextFormatting.YELLOW);
public static final IFormattableTextComponent COLORABLE = REGISTRATE.addLang("tooltip", ZYCraft.id("colorable"), "Colorable")
.withStyle(TextFormatting.ITALIC, TextFormatting.YELLOW);
public static final IFormattableTextComponent COLORABLE_RED = REGISTRATE.addLang("tooltip", ZYCraft.id("colorable"), "red", "Red")
.withStyle(TextFormatting.RED);
public static final IFormattableTextComponent COLORABLE_GREEN = REGISTRATE.addLang("tooltip", ZYCraft.id("colorable"), "green", "Green")
.withStyle(TextFormatting.GREEN);
public static final IFormattableTextComponent COLORABLE_BLUE = REGISTRATE.addLang("tooltip", ZYCraft.id("colorable"), "blue", "Blue Zychorium")
.withStyle(TextFormatting.BLUE);
public static final IFormattableTextComponent COLORABLE_DARK = REGISTRATE.addLang("tooltip", ZYCraft.id("colorable"), "dark", "Dark Zychorium")
.withStyle(TextFormatting.DARK_GRAY);
public static final IFormattableTextComponent COLORABLE_LIGHT = REGISTRATE.addLang("tooltip", ZYCraft.id("colorable"), "light", "Light Zychorium")
.withStyle(TextFormatting.WHITE);
public static final IFormattableTextComponent COLORABLE_ANY_DYE = REGISTRATE.addLang("tooltip", ZYCraft.id("colorable"), "any_dye", "Any Dye")
.withStyle(TextFormatting.YELLOW);
public static final IFormattableTextComponent COLORABLE_INFO = addLangWithArgs("tooltip", ZYCraft.id("colorable"), "info", "%s or %s on this block with one of the following:", RIGHT_CLICK, SHIFT_RIGHT_CLICK)
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent COLORABLE_RGB = addLangWithArgs("tooltip", ZYCraft.id("colorable"), "rgb", "%s, %s or %s - to adjust each individual RGB", COLORABLE_RED, COLORABLE_GREEN, COLORABLE_BLUE)
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent COLORABLE_BRIGHTNESS = addLangWithArgs("tooltip", ZYCraft.id("colorable"), "brightness", "%s - to adjust color brightness", COLORABLE_DARK)
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent COLORABLE_RESET = addLangWithArgs("tooltip", ZYCraft.id("colorable"), "reset", "%s - to reset color", COLORABLE_LIGHT)
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent COLORABLE_DYE = addLangWithArgs("tooltip", ZYCraft.id("colorable"), "dye", "%s - to apply color of the dye", COLORABLE_ANY_DYE)
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent COLOR_SCANNER_APPLY = addLangWithArgs("tooltip", ZYCraft.id("color_scanner"), "apply", "%s on %s block to apply color", RIGHT_CLICK, COLORABLE)
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent COLOR_SCANNER_COPY = addLangWithArgs("tooltip", ZYCraft.id("color_scanner"), "copy", "%s on %s block to copy color", SHIFT_RIGHT_CLICK, COLORABLE)
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent COLOR_SCANNER_CURRENT_COLOR = REGISTRATE.addLang("tooltip", ZYCraft.id("color_scanner"), "current_color", "Current color:")
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final TranslationTextComponent COLOR_SCANNER_RED = (TranslationTextComponent)REGISTRATE.addLang("tooltip", ZYCraft.id("color_scanner"), "red", "Red: %s")
.withStyle(TextFormatting.ITALIC, TextFormatting.RED);
public static final TranslationTextComponent COLOR_SCANNER_GREEN = (TranslationTextComponent)REGISTRATE.addLang("tooltip", ZYCraft.id("color_scanner"), "green", "Green: %s")
.withStyle(TextFormatting.ITALIC, TextFormatting.GREEN);
public static final TranslationTextComponent COLOR_SCANNER_BLUE = (TranslationTextComponent)REGISTRATE.addLang("tooltip", ZYCraft.id("color_scanner"), "blue", "Blue: %s")
.withStyle(TextFormatting.ITALIC, TextFormatting.BLUE);
public static final TranslationTextComponent SCYTHE_DURABILITY = REGISTRATE.addLang("tooltip", ZYCraft.id("scythe"), "durability", "Durability: %s%%");
public static final IFormattableTextComponent INTERFACE = REGISTRATE.addLang("tooltip", ZYCraft.id("interface"), "Interface")
.withStyle(TextFormatting.ITALIC, TextFormatting.YELLOW);
public static final IFormattableTextComponent VALVE_INFO = REGISTRATE.addLang("tooltip", ZYCraft.id("valve"), "info", "Allows for fluids to pass through")
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent VALVE_FEATURE = REGISTRATE.addLang("tooltip", ZYCraft.id("valve"), "feature", "Fluids balance between valves")
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent ITEM_IO_INFO = REGISTRATE.addLang("tooltip", ZYCraft.id("item_io"), "info", "Allows for items to pass through")
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final IFormattableTextComponent ITEM_IO_FEATURE = REGISTRATE.addLang("tooltip", ZYCraft.id("item_io"), "feature", "Increases control over flow of items")
.withStyle(TextFormatting.ITALIC, TextFormatting.GRAY);
public static final TranslationTextComponent FLUID_TANK_FILLED = (TranslationTextComponent)REGISTRATE.addLang("tooltip", ZYCraft.id("fluid_tank"), "filled", "%s/%s mB")
.withStyle(TextFormatting.GRAY);
public static final TranslationTextComponent FABRICATOR = REGISTRATE.addLang("container", ZYCraft.id("fabricator"), "Fabricator");
public static final TranslationTextComponent TANK = REGISTRATE.addLang("container", ZYCraft.id("tank"), "Multi-Tank");
public static final TranslationTextComponent NARRATE_GAUGE = REGISTRATE.addLang("gui", ZYCraft.id("narrate"), "gauge", "%s gauge: filled %s of %s");
public static final TranslationTextComponent FABRICATOR_AUTO_LOW = REGISTRATE.addLang("mode", ZYCraft.id("fabricator"), "auto_low", "Auto: Low");
public static final TranslationTextComponent FABRICATOR_AUTO_HIGH = REGISTRATE.addLang("mode", ZYCraft.id("fabricator"), "auto_high", "Auto: High");
public static final TranslationTextComponent FABRICATOR_PULSE = REGISTRATE.addLang("mode", ZYCraft.id("fabricator"), "pulse", "Pulse");
public static final TranslationTextComponent VALVE_IN = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("valve"), "in", "Input")
.withStyle(TextFormatting.BLUE);
public static final TranslationTextComponent VALVE_OUT = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("valve"), "out", "Output")
.withStyle(TextFormatting.GOLD);
public static final TranslationTextComponent ITEM_IO_ANY = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("item_io"), "any", "Input/Output")
.withStyle(TextFormatting.WHITE);
public static final TranslationTextComponent ITEM_IO_ALL_IN = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("item_io"), "all_in", "Input")
.withStyle(TextFormatting.BLUE);
public static final TranslationTextComponent ITEM_IO_ALL_OUT = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("item_io"), "all_out", "Output")
.withStyle(TextFormatting.GOLD);
public static final TranslationTextComponent ITEM_IO_IN1 = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("item_io"), "in1", "Input1")
.withStyle(TextFormatting.DARK_PURPLE);
public static final TranslationTextComponent ITEM_IO_OUT1 = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("item_io"), "out1", "Output1")
.withStyle(TextFormatting.DARK_RED);
public static final TranslationTextComponent ITEM_IO_IN2 = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("item_io"), "in2", "Input2")
.withStyle(TextFormatting.GREEN);
public static final TranslationTextComponent ITEM_IO_OUT2 = (TranslationTextComponent)REGISTRATE.addLang("mode", ZYCraft.id("item_io"), "out2", "Output2")
.withStyle(TextFormatting.YELLOW);
public static final TranslationTextComponent CURRENT_COLOR_LABEL = (TranslationTextComponent)REGISTRATE.addLang("label", ZYCraft.id("current_color"), "Current color:")
.withStyle(TextFormatting.GRAY);
public static final TranslationTextComponent MODE_LABEL = REGISTRATE.addLang("label", ZYCraft.id("mode"), "Mode: ");
public static final TranslationTextComponent SOURCE_FLUID_LABEL = REGISTRATE.addLang("label", ZYCraft.id("source_fluid"), "Source fluid: ");
public static final TranslationTextComponent VOID_FLUID_LABEL = REGISTRATE.addLang("label", ZYCraft.id("void_fluid"), "Void fluid: ");
public static final TranslationTextComponent STORED_FLUID_LABEL = REGISTRATE.addLang("label", ZYCraft.id("stored_fluid"), "Stored fluid: ");
public static final TranslationTextComponent RED_INFO = (TranslationTextComponent)REGISTRATE.addLang("info", ZYCraft.id("red"), "Red: %s")
.withStyle(TextFormatting.RED);
public static final TranslationTextComponent GREEN_INFO = (TranslationTextComponent)REGISTRATE.addLang("info", ZYCraft.id("green"), "Green: %s")
.withStyle(TextFormatting.GREEN);
public static final TranslationTextComponent BLUE_INFO = (TranslationTextComponent)REGISTRATE.addLang("info", ZYCraft.id("blue"), "Blue: %s")
.withStyle(TextFormatting.BLUE);
public static final TranslationTextComponent FLUID_INFO = REGISTRATE.addLang("info", ZYCraft.id("fluid"), "%s %s mB");
public static final TranslationTextComponent FABRICATOR_RECIPE_INCOMPATIBLE = REGISTRATE.addLang("jei", ZYCraft.id("fabricator"), "recipe_incompatible", "Recipe incompatible with Fabricator");
public static void init()
{
REGISTRATE.addLang("itemGroup", ZYCraft.id("fluids"), "ZYCraft Fluids");
REGISTRATE.addLang("death.attack", ZYCraft.id("quartz_crystal"), "%1$s was slowly poked by Quartz Crystal");
REGISTRATE.addLang("death.attack", ZYCraft.id("quartz_crystal"), "player", "%1$s was slowly poked by Quartz Crystal because of %2$s");
}
public static TranslationTextComponent addLangWithArgs(String type, ResourceLocation id, String localizedName, Object... args)
{
return new TranslationTextComponent(REGISTRATE.addLang(type, id, localizedName).getKey(), args);
}
public static TranslationTextComponent addLangWithArgs(String type, ResourceLocation id, String suffix, String localizedName, Object... args)
{
return new TranslationTextComponent(REGISTRATE.addLang(type, id, suffix, localizedName).getKey(), args);
}
public static IFormattableTextComponent copy(TranslationTextComponent component, Object... args)
{
return new TranslationTextComponent(component.getKey(), args).setStyle(component.getStyle());
}
} | java | 10 | 0.740424 | 220 | 84.959184 | 147 | starcoderdata |
//
// => gcore/window/window.cpp
//
// GraphCore
//
// Copyright (c) 2018
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
#include <gcore/window/window.h> // assumed project header; the original include target was lost during extraction
#ifdef _WIN32
#include <thread>
#include <chrono>
#define usleep(x) std::this_thread::sleep_for(std::chrono::microseconds((long long)(x)))
#else
#include <unistd.h>
#endif
#define TARGET_FPS 60
#define FPS_TIME 1.0f/TARGET_FPS
using namespace gcore;
static GLFWmonitor *getMonitor(Window::monitor_index monitorIndex) {
if (monitorIndex == GCORE_WINDOW_NO_FULLSCREEN) {
return nullptr;
}
if (monitorIndex == GCORE_WINDOW_MONITOR_DEFAULT) {
return glfwGetPrimaryMonitor();
}
int monitorCount;
GLFWmonitor **monitors = glfwGetMonitors(&monitorCount);
if (monitorIndex < monitorCount) {
return monitors[monitorIndex];
}
return nullptr;
}
Window::Window(const char *title, window_size width, window_size height, monitor_index monitorIndex) : title(title), width(width), height(height), monitorIndex(monitorIndex) {
if ((_monitor = getMonitor(monitorIndex)) != nullptr) {
int w, h;
glfwGetMonitorPhysicalSize(_monitor, &w, &h);
width = w;
height = h;
}
}
void Window::setSize(window_size w, window_size h) {
width = w;
height = h;
glfwSetWindowSize(_window, w, h);
getDrawer().doResize();
}
void Window::setTitle(const char *title) {
this->title = title;
glfwSetWindowTitle(_window, title);
}
bool Window::make() {
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
_window = glfwCreateWindow(width, height, title, _monitor, nullptr);
if (!_window) {
return false;
}
glfwSetInputMode(_window, GLFW_STICKY_KEYS, GL_TRUE);
glfwSetInputMode(_window, GLFW_STICKY_MOUSE_BUTTONS, GL_TRUE);
return true;
}
bool Window::goFullScreen(monitor_index monitorIndex) {
assert(monitorIndex != GCORE_WINDOW_NO_FULLSCREEN);
if (GLFWmonitor *newMonitor = getMonitor(monitorIndex)) {
int w, h;
glfwGetMonitorPhysicalSize(newMonitor, &w, &h);
width = w;
height = h;
_monitor = newMonitor;
glfwSetWindowMonitor(_window, _monitor, 0, 0, width, height, GLFW_DONT_CARE);
return true;
}
return false;
}
bool Window::goWindow(window_size w, window_size h) {
width = w;
height = h;
_monitor = nullptr;
glfwSetWindowMonitor(_window, nullptr, 0, 0, w, h, GLFW_DONT_CARE);
return true;
}
bool Window::startLoop() {
WindowDrawer &drawer = getDrawer();
GLFWwindow *glfwWin = _window;
drawer.doInit();
drawer.doResize();
double lastTime = 0;
glfwSetTime(0);
double startFrameTime, endFrameTime, frameTime;
do {
startFrameTime = glfwGetTime();
{
double dt = startFrameTime - lastTime;
lastTime = startFrameTime;
drawer.doUpdate(dt);
}
drawer.doRender();
glfwSwapBuffers(glfwWin);
glfwPollEvents();
endFrameTime = glfwGetTime();
frameTime = endFrameTime - startFrameTime;
if (frameTime < FPS_TIME) {
usleep((FPS_TIME - frameTime) * 10e5);
}
} while (!shouldClose());
drawer.doDestroy();
return true;
} | c++ | 13 | 0.647168 | 175 | 26.965318 | 173 | starcoderdata |
// Copyright (C) 2014 (
// Licensed under the terms of the MIT license. See LICENCE for details.
#pragma once
#include <functional>
#include <string>
namespace lastpass
{
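// Simple scope guard: runs the supplied function when the AtExit instance is destroyed.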
class AtExit
{
public:
typedef std::function<void ()> Function;
explicit AtExit(Function &&f):
f_(std::move(f))
{
}
~AtExit()
{
f_();
}
private:
Function f_;
};
std::string encode_hex(std::string const &bytes);
std::string decode_hex(std::string const &hex_text);
std::string decode_base64(std::string const &base64_text);
} | c | 12 | 0.632042 | 72 | 15.228571 | 35 | starcoderdata |
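// Returns the index of the first element after the largest gap between consecutive values in lois.data[pos..limit).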
private int maxGapIdx(Lois lois, int pos, int limit) {
int maxGapIdx = -1;
{
int maxGap = -1;
int previous = lois.data[pos];
for(int i=pos; i<limit; i++) {
int temp = lois.data[i];
int dif = temp-previous;
if(dif >= maxGap) {
maxGap = dif;
maxGapIdx = i; //this is the first item to be moved
}
previous = temp;
}
assert(maxGapIdx>0) : "Should not happen because there must be a gap";
}
return maxGapIdx;
} | java | 10 | 0.593074 | 73 | 24.722222 | 18 | inline |
#include <bits/stdc++.h>
#include <algorithm>
#define MIN(a,b) (a<b?a:b)
#define MAX(a,b) (a>b?a:b)
#define pi 3.14159265358979323846264338327950288419716939937510582097494459230
//BIT operation
#define SET_BIT(a, x) (a |= ((1<<x)))
#define CLEAR_BIT(a, x) (a &= (~(1<<x)))
#define IS_SET(a, x) ((a>>x) & 1)
int modifyBit(int n, int p, bool b)
{
int mask = 1 << p;
return (n & ~mask) | ((b << p) & mask);
}
using namespace std;
typedef unsigned long long ull;
typedef long double ld;
//TODO : pair , string, priority_queue, set, map, sort, algorithm
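// Reads an angle X and prints the minimum number of X-degree rotations needed to return to the start (lcm(360, X) / X).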
int main(int argc, char* argv[])
{
int X;
cin >>X;
int max = 360;
if((360%X)==0)
{
cout << 360/X;
return 0;
}
do
{
if (max % 360 == 0 && max % X == 0)
{
break;
}
else
++max;
} while (true);
cout << max/X;
return 0;
}
| c++ | 10 | 0.530934 | 78 | 18.755556 | 45 | codenet |
package com.jobin.test;
/**
* Created by msi on 10/17/2017.
*/
public class Data{
String name;
String img_url,message;
String gmail;
public Data(){
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getImg_url() {
return img_url;
}
public void setImg_url(String img_url) {
this.img_url = img_url;
}
public String getGmail() {
return gmail;
}
public void setGmail(String gmail) {
this.gmail = gmail;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
} | java | 8 | 0.560976 | 44 | 15.065217 | 46 | starcoderdata |
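// Builds a CSV string from an array of result rows, keeping only the columns whose flag is true in columsToInclude.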
function resultToCSV(resultObject, columsToInclude) {
var columns = Object.keys(resultObject[0]).filter(function (rowName) {
return columsToInclude[rowName] == true;
});
var output_csv_string = columns.join(",") + getLinebreak(); // Finally every row is written to the csv string:
for (var i = 0; i < resultObject.length; i++) {
output_csv_string += Object.keys(resultObject[i]).filter(function (rowName) {
return columsToInclude[rowName] == true;
}).map(function (keyName) {
return resultObject[i][keyName].toString();
}).join(",") + getLinebreak();
}
return output_csv_string;
} | javascript | 19 | 0.673633 | 112 | 37.9375 | 16 | inline |
# scraping comments
import datetime
import json
import os
import threading
import time
from showroom.constants import TOKYO_TZ, FULL_DATE_FMT
from showroom.utils import format_name
from requests.exceptions import HTTPError
# TODO: save comments, stats, telop(s)
# {
# "comment_log": [],
# "telop": {
# "latest": {
# "text": "",
# "created_at": ""
# },
# "older": [
# {
# "text": "",
# "created_at": ""
# }
# ]
# },
# "live_info": {
# # stuff like view count over time etc.
# }
# }
'''
Option 1:
2 separate "loggers", one for comments, one for stats/telop
The *only* reason to do this is to allow grabbing just stats and telop instead of all three.
So I'm not going to do that. What's option 2.
Options 2:
StatsLogger, CommentsLogger, RoomLogger:
StatsLogger records just stats and telop
'''
class CommentLogger(object):
comment_id_pattern = "{created_at}_{user_id}"
def __init__(self, room, client, settings, watcher):
self.room = room
self.client = client
self.settings = settings
self.watcher = watcher
self.last_update = datetime.datetime.fromtimestamp(10000, tz=TOKYO_TZ)
self.update_interval = self.settings.comments.default_update_interval
self.comment_log = []
self.comment_ids = set()
self._thread = None
def start(self):
if not self._thread:
self._thread = threading.Thread(target=self.run, name='{} Comment Log'.format(self.room.name))
self._thread.start()
def run(self):
# TODO: allow comment_logger to trigger get_live_status ?
last_counts = []
max_interval = self.settings.comments.max_update_interval
min_interval = self.settings.comments.min_update_interval
_, destdir, filename = format_name(self.settings.directory.data,
self.watcher.start_time.strftime(FULL_DATE_FMT),
self.room, ext=self.settings.ffmpeg.container)
# TODO: modify format_name so it doesn't require so much hackery for this
filename = filename.replace(self.settings.ffmpeg.container, ' comments.json')
destdir += '/comments'
# TODO: only call this once per group per day
os.makedirs(destdir, exist_ok=True)
outfile = '/'.join((destdir, filename))
def add_counts(count):
return [count] + last_counts[:2]
print("Recording comments for {}".format(self.room.name))
while self.watcher.is_live():
count = 0
seen = 0
# update comments
try:
data = self.client.comment_log(self.room.room_id) or []
except HTTPError as e:
# TODO: log/handle properly
print('HTTP Error while getting comments for {}: {}'.format(self.room.handle, e))
break
for comment in data:
if len(comment['comment']) < 4 and comment['comment'].isdigit():
continue
cid = self.comment_id_pattern.format(**comment)
if cid not in self.comment_ids:
self.comment_log.append(comment)
self.comment_ids.add(cid)
count += 1
else:
seen += 1
if seen > 5:
last_counts = add_counts(count)
break
# update update_interval if needed
highest_count = max(last_counts, default=10)
if highest_count < 7 and self.update_interval < max_interval:
self.update_interval += 1.0
elif highest_count > 50 and self.update_interval > min_interval:
self.update_interval *= 0.5
elif highest_count > 20 and self.update_interval > min_interval:
self.update_interval -= 1.0
current_time = datetime.datetime.now(tz=TOKYO_TZ)
timediff = (current_time - self.last_update).total_seconds()
self.last_update = current_time
sleep_timer = max(0.5, self.update_interval - timediff)
time.sleep(sleep_timer)
with open(outfile, 'w', encoding='utf8') as outfp:
json.dump({"comment_log": sorted(self.comment_log, key=lambda x: x['created_at'], reverse=True)},
outfp, indent=2, ensure_ascii=False)
def join(self):
self._thread.join()
class RoomScraper:
comment_id_pattern = "{created_at}_{user_id}"
def __init__(self, room, client, settings, watcher, record_comments=False):
self.room = room
self.client = client
self.settings = settings
self.watcher = watcher
self.last_update = datetime.datetime.fromtimestamp(10000, tz=TOKYO_TZ)
self.update_interval = self.settings.comments.default_update_interval
self.comment_log = []
self.comment_ids = set()
self._thread = None
self.record_comments = record_comments
def start(self):
if not self._thread:
if self.record_comments:
self._thread = threading.Thread(target=self.record_with_comments,
name='{} Room Log'.format(self.room.name))
else:
self._thread = threading.Thread(target=self.record,
name='{} Room Log'.format(self.room.name))
self._thread.start()
def _fetch_comments(self):
pass
def _parse_comments(self, comment_log):
pass
def _fetch_info(self):
"https://www.showroom-live.com/room/get_live_data?room_id=76535"
pass
def _parse_info(self, info):
result = {
# TODO: check for differences between result and stored data
# some of this stuff should never change and/or is useful in the Watcher
"live_info": {
"created_at": info['live_res'].get('created_at'),
"started_at": info['live_res'].get('started_at'),
"live_id": info['live_res'].get('live_id'),
"comment_num": info['live_res'].get('comment_num'), # oooohhhhhh
# "chat_token": info['live_res'].get('chat_token'),
"hot_point": "",
"gift_num": "",
"live_type": "",
"ended_at": "",
"view_uu": "",
"bcsvr_key": "",
},
"telop": info['telop'],
"broadcast_key": "", # same as live_res.bcsvr_key
"online_user_num": "", # same as live_res.view_uu
"room": {
"last_live_id": "",
},
"broadcast_port": 8080,
"broadcast_host": "onlive.showroom-live.com",
}
pass
def record_with_comments(self):
# TODO: allow comment_logger to trigger get_live_status ?
last_counts = []
max_interval = self.settings.comments.max_update_interval
min_interval = self.settings.comments.min_update_interval
_, destdir, filename = format_name(self.settings.directory.data,
self.watcher.start_time.strftime(FULL_DATE_FMT),
self.room, self.settings.ffmpeg.container)
# TODO: modify format_name so it doesn't require so much hackery for this
filename = filename.replace('.{}'.format(self.settings.ffmpeg.container), ' comments.json')
destdir += '/comments'
# TODO: only call this once per group per day
os.makedirs(destdir, exist_ok=True)
outfile = '/'.join((destdir, filename))
def add_counts(count):
return [count] + last_counts[:2]
print("Recording comments for {}".format(self.room.name))
while self.watcher.is_live():
count = 0
seen = 0
# update comments
try:
data = self.client.comment_log(self.room.room_id) or []
except HTTPError as e:
# TODO: log/handle properly
print('HTTP Error while getting comments for {}: {}\n{}'.format(self.room.handle, e, e.response.content))
break
for comment in data:
cid = self.comment_id_pattern.format(**comment)
if cid not in self.comment_ids:
self.comment_log.append(comment)
self.comment_ids.add(cid)
count += 1
else:
seen += 1
if seen > 5:
last_counts = add_counts(count)
break
# update update_interval if needed
highest_count = max(last_counts, default=10)
if highest_count < 7 and self.update_interval < max_interval:
self.update_interval += 1.0
elif highest_count > 50 and self.update_interval > min_interval:
self.update_interval *= 0.5
elif highest_count > 20 and self.update_interval > min_interval:
self.update_interval -= 1.0
current_time = datetime.datetime.now(tz=TOKYO_TZ)
timediff = (current_time - self.last_update).total_seconds()
self.last_update = current_time
sleep_timer = max(0.5, self.update_interval - timediff)
time.sleep(sleep_timer)
with open(outfile, 'w', encoding='utf8') as outfp:
json.dump({"comment_log": sorted(self.comment_log, key=lambda x: x['created_at'], reverse=True)},
outfp, indent=2, ensure_ascii=False)
def record(self):
pass
def join(self):
pass | python | 19 | 0.54165 | 121 | 35.590406 | 271 | starcoderdata |
<?php
namespace App\Http\Controllers\Frontend;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use App\Models\AboutUs;
use App\Models\PracticalSolution;
use App\Models\Slider;
use App\Models\Champion;
use App\Models\MedicalOpinion;
use App\Models\VideosParticipate;
use Spatie\MediaLibrary\Models\Media;
use App\Http\Controllers\Traits\MediaUploadingTrait;
use Auth;
class HomeController extends Controller
{
public function index()
{
$about = AboutUs::first();
$solutions = PracticalSolution::where('status','accepted')
->orderBy('updated_at', 'desc')
->get()
->take(2);
$sliders = Slider::with(['media'])->get();
$champions = Champion::orderBy('updated_at', 'desc')
->get()
->take(4);
$medicalOpinions = MedicalOpinion::orderBy('updated_at', 'desc')
->get()
->take(2);
return view(
'frontend.home',
compact(
'about',
'solutions',
'sliders',
'champions',
'medicalOpinions'
)
);
}
public function profile(){
$user = Auth::user();
return view('frontend.profile',compact('user'));
}
public function profile_solves(){
$user = Auth::user();
$practical_solutions = PracticalSolution::where('user_id',$user->id)->orderBy('created_at','desc')->paginate(6);
return view('frontend.profilesolves',compact('user','practical_solutions'));
}
public function profile_videos(){
$user = Auth::user();
$practical_solutions = PracticalSolution::pluck('title', 'id')->prepend(trans('global.pleaseSelect'), '');
$champions = Champion::pluck('name', 'id')->prepend(trans('global.pleaseSelect'), '');
$video_participates = VideosParticipate::where('user_id',$user->id)->orderBy('created_at','desc')->paginate(6);
return view('frontend.profilevideos',compact('user','video_participates','practical_solutions','champions'));
}
public function EditProfile(){
$user = Auth::user();
return view('frontend.EditProfile',compact('user'));
}
} | php | 14 | 0.594727 | 120 | 32.909091 | 66 | starcoderdata |
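// Fills the w-by-h rectangle anchored at (x0, y0), clamping to the display bounds and drawing it one column at a time.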
void Display::fillRectangle(int16_t x0,int16_t y0, int16_t w, int16_t h){
int16_t x,y,x1,y1;
x1=x0+w;y1=y0+h;
if ((x0<0 && x1<0) || (x0>=width && x1 >=width)) return; //completely out of bounds
if ((y0<0 && y1<0) || (y0>=height && y1 >=height)) return; //completely out of bounds
if (x0>x1) {x=x1;x1=x0;}
else x=x0;
if (y0>y1) {y=y1;y1=y0;}
else y=y0;
if (x<0) x=0;
if (y<0) y=0;
for (;x<x1;x++) drawColumn(x,y,y1);
} | c++ | 9 | 0.526427 | 89 | 34.538462 | 13 | inline |
<?php
/*
$defaults = array(
'type' => 'monthly', 'limit' => '',
'format' => 'html', 'before' => '',
'after' => '', 'show_post_count' => false,
'echo' => 1, 'order' => 'DESC',
);
*/
class Tests_Get_Archives extends WP_UnitTestCase {
protected static $post_ids;
protected $month_url;
protected $year_url;
function setUp() {
parent::setUp();
$this->month_url = get_month_link( gmdate( 'Y' ), gmdate( 'm' ) );
$this->year_url = get_year_link( gmdate( 'Y' ) );
}
public static function wpSetUpBeforeClass( $factory ) {
self::$post_ids = $factory->post->create_many(
8,
array(
'post_type' => 'post',
'post_author' => '1',
)
);
}
function test_wp_get_archives_default() {
$expected['default'] = "<li><a href='" . $this->month_url . "'>" . gmdate( 'F Y' ) . '</a></li>';
$this->assertEquals( $expected['default'], trim( wp_get_archives( array( 'echo' => false ) ) ) );
}
function test_wp_get_archives_type() {
$expected['type'] = "<li><a href='" . $this->year_url . "'>" . gmdate( 'Y' ) . '</a></li>';
$this->assertEquals(
$expected['type'],
trim(
wp_get_archives(
array(
'echo' => false,
'type' => 'yearly',
)
)
)
);
}
function test_wp_get_archives_limit() {
$ids = array_slice( array_reverse( self::$post_ids ), 0, 5 );
$link1 = get_permalink( $ids[0] );
$link2 = get_permalink( $ids[1] );
$link3 = get_permalink( $ids[2] );
$link4 = get_permalink( $ids[3] );
$link5 = get_permalink( $ids[4] );
$title1 = get_post( $ids[0] )->post_title;
$title2 = get_post( $ids[1] )->post_title;
$title3 = get_post( $ids[2] )->post_title;
$title4 = get_post( $ids[3] )->post_title;
$title5 = get_post( $ids[4] )->post_title;
$expected['limit'] = <<<EOF
<li><a href='$link1'>$title1</a></li>
<li><a href='$link2'>$title2</a></li>
<li><a href='$link3'>$title3</a></li>
<li><a href='$link4'>$title4</a></li>
<li><a href='$link5'>$title5</a></li>
EOF;
$this->assertEqualsIgnoreEOL(
$expected['limit'],
trim(
wp_get_archives(
array(
'echo' => false,
'type' => 'postbypost',
'limit' => 5,
)
)
)
);
}
function test_wp_get_archives_format() {
$expected['format'] = "<option value='" . $this->month_url . "'> " . gmdate( 'F Y' ) . '</option>';
$this->assertEquals(
$expected['format'],
trim(
wp_get_archives(
array(
'echo' => false,
'format' => 'option',
)
)
)
);
}
function test_wp_get_archives_before_and_after() {
$expected['before_and_after'] = "<div><a href='" . $this->month_url . "'>" . gmdate( 'F Y' ) . '</a></div>';
$this->assertEquals(
$expected['before_and_after'],
trim(
wp_get_archives(
array(
'echo' => false,
'format' => 'custom',
'before' => '<div>', // wrapper markup assumed; the original literals were stripped during extraction
'after' => '</div>',
)
)
)
);
}
function test_wp_get_archives_show_post_count() {
$expected['show_post_count'] = "<li><a href='" . $this->month_url . "'>" . gmdate( 'F Y' ) . '</a>&nbsp;(8)</li>';
$this->assertEquals(
$expected['show_post_count'],
trim(
wp_get_archives(
array(
'echo' => false,
'show_post_count' => 1,
)
)
)
);
}
function test_wp_get_archives_echo() {
$expected['echo'] = "\t<li><a href='" . $this->month_url . "'>" . gmdate( 'F Y' ) . '</a></li>' . "\n";
$this->expectOutputString( $expected['echo'] );
wp_get_archives( array( 'echo' => true ) );
}
function test_wp_get_archives_order() {
self::factory()->post->create(
array(
'post_type' => 'post',
'post_author' => '1',
'post_date' => '2012-10-23 19:34:42',
)
);
$date_full = gmdate( 'F Y' );
$oct_url = get_month_link( 2012, 10 );
$expected['order_asc'] = <<<EOF
<li><a href='{$oct_url}'>October 2012</a></li>
<li><a href='{$this->month_url}'>$date_full</a></li>
EOF;
$this->assertEqualsIgnoreEOL(
$expected['order_asc'],
trim(
wp_get_archives(
array(
'echo' => false,
'order' => 'ASC',
)
)
)
);
$expected['order_desc'] = <<<EOF
<li><a href='{$this->month_url}'>$date_full</a></li>
<li><a href='{$oct_url}'>October 2012</a></li>
EOF;
$this->assertEqualsIgnoreEOL(
$expected['order_desc'],
trim(
wp_get_archives(
array(
'echo' => false,
'order' => 'DESC',
)
)
)
);
}
/**
* @ticket 21596
*/
function test_wp_get_archives_post_type() {
register_post_type( 'taco', array( 'public' => true ) );
self::factory()->post->create(
array(
'post_type' => 'taco',
'post_author' => '1',
'post_date' => '2014-10-23 19:34:42',
)
);
$oct_url = esc_url( add_query_arg( 'post_type', 'taco', get_month_link( 2014, 10 ) ) );
$expected = " href='{$oct_url}'>October 2014
$archives = wp_get_archives(
array(
'echo' => false,
'post_type' => 'taco',
)
);
$this->assertEquals( $expected, trim( $archives ) );
}
} | php | 18 | 0.525504 | 116 | 23.085714 | 210 | starcoderdata |
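// Forwards a configuration change to every channel profiler's aggregator and refreshes the graph data while inspecting a capture.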
void ChannelControl::OnConfigurationChanged()
{
if (IsInCaptureMode(CaptureMode::Inspecting))
{
for (ChannelProfilerWidget* profilerWidget : m_profilerWidgets)
{
profilerWidget->GetAggregator()->OnConfigurationChanged();
}
// Force the channel data view to regrab all of the aggregator data.
channelDataView->RefreshGraphData();
}
} | c++ | 12 | 0.600907 | 80 | 33 | 13 | inline |
using System;
using Gtk;
namespace QS.Tdi.Gtk
{
public interface ITDIWidgetResolver
{
Widget Resolve(ITdiTab tab);
}
} | c# | 8 | 0.753247 | 36 | 13 | 11 | starcoderdata |
const {ethers} = require("hardhat");
const {BigNumber} = require("ethers");
const {addPool, initPool} = require("./compoundAsset");
const {deployContract} = require("./utils");
async function deployCompound() {
// accounts
const [sender,] = await ethers.getSigners();
// deploy timelock
const timelockFactory = await ethers.getContractFactory("Timelock");
const timelockInstance = await timelockFactory.deploy(sender.address, BigNumber.from("1"));
// comp
const comp = await deployContract("Comp", [sender.address])
// oracle
const priceOracle = await deployContract("SimplePriceOracle", []);
// factor
const facotr = BigNumber.from(1).mul(10).pow(18).div(100);
const factor16 = BigNumber.from(10).pow(16);
const factor18 = BigNumber.from(10).pow(18);
const interestModel = await deployContract("JumpRateModelV2", [
((BigNumber.from(5)).mul(facotr)),//baseRatePerYear
(BigNumber.from(12).mul(facotr)),//multiplierPerYear
(BigNumber.from(24).mul(facotr)),//jumpMultiplierPerYear
(BigNumber.from(80).mul(facotr)),//kink_
timelockInstance.address // owner
])
const unitrollerProxy = await deployContract("Unitroller", []);
const comptrollerImpl = await deployContract("Comptroller", []);
// console.log("comptrollerImpl: ", comptrollerImpl.address);
await unitrollerProxy._setPendingImplementation(comptrollerImpl.address);
await comptrollerImpl._become(unitrollerProxy.address);
const unitrollerProxyToImplFa = await ethers.getContractFactory("Comptroller");
const unitrollerProxyToImpl = unitrollerProxyToImplFa.attach(unitrollerProxy.address);
// initialize comptroller parameters
await unitrollerProxyToImpl._setPriceOracle(priceOracle.address);
// maximum fraction of origin loan that can be liquidated
await unitrollerProxyToImpl._setCloseFactor(BigNumber.from(50).mul(facotr));
// 50%
// collateral received as a multiplier of amount liquidator paid
await unitrollerProxyToImpl._setLiquidationIncentive(BigNumber.from(108).mul(facotr));
// 108%
// add pool 1
// underlying asset
const TT1 = await deployContract("TestERC20Asset", ["TT1", "T1"]);
const data = "0x00";
const cTT1 = await addPool(
TT1.address,
unitrollerProxy.address,
interestModel.address,
BigNumber.from(2).mul(factor16),
"Compound cTT1",
"cTT1",
8,
sender.address,
data
)
//await cTT1._setReserveFactor(BigNumber.from(25).mul(facotr));
// // set price of TT1
// await priceOracle.setUnderlyingPrice(cTT1.address, BigNumber.from(1).mul(facotr).mul(100));
// // set markets supported by comptroller
// await unitrollerProxyToImpl._supportMarket(cTT1.address);
// // multiplier of collateral for borrowing cap
// await unitrollerProxyToImpl._setCollateralFactor(
// cTT1.address,
// BigNumber.from(60).mul(facotr)
// );
let reserveFactor = BigNumber.from(25).mul(facotr);
let underlyingPrice = 100;
let collateralFactor = BigNumber.from(60).mul(facotr);
let contracts = {priceOracle: priceOracle, unitrollerProxyToImpl: unitrollerProxyToImpl,}
let compSpeed = BigNumber.from(67).mul(facotr).div(10)
await initPool(
cTT1,
contracts,
reserveFactor,
BigNumber.from(underlyingPrice).mul(factor18),
collateralFactor,
0,
compSpeed,
compSpeed
)
// add pool 2
const TT2 = await deployContract("TestERC20Asset", ["TT2", "T2"]);
const cTT2 = await addPool(TT2.address,
unitrollerProxy.address,
interestModel.address,
BigNumber.from(2).mul(factor16),
"Compound cTT2",
"cTT2",
8,
sender.address,
data
)
await initPool(
cTT2,
contracts,
reserveFactor,
BigNumber.from(underlyingPrice).mul(factor18),
collateralFactor,
0,
compSpeed,
compSpeed
)
return {
TT1: TT1,
TT2: TT2,
comp: comp,
cTT1: cTT1,
cTT2: cTT2,
unitrollerProxy: unitrollerProxy,
priceOracle: priceOracle,
interestModel: interestModel
};
}
module.exports = {
deployCompound,
} | javascript | 15 | 0.655493 | 98 | 31.067164 | 134 | starcoderdata |
# import torch
# import torch.nn as nn
# from torchvision.models import resnet18
# import numpy as np
# class AffineCouplingLayer(nn.Module):
# def __init__(self, input_dim, hidden_dim, mask):
# super(AffineCouplingLayer, self).__init__()
# self.mask = mask
# self.net = nn.Sequential(
# nn.Linear(input_dim, hidden_dim),
# nn.LeakyReLU(),
# nn.Linear(hidden_dim, hidden_dim),
# nn.LeakyReLU(),
# nn.Linear(hidden_dim, 2*input_dim)
# )
# def forward(self, input, reverse=False):
# x0 = torch.mul(input, self.mask)
# st = self.net(x0)
# # rescale s with tanh and scale factor
# s, t = torch.chunk(st, 2, dim=1)
# s = torch.mul(1-self.mask, torch.tanh(s))
# t = torch.mul(1-self.mask, t)
# if reverse:
# # FROM Z TO X
# tmp = torch.mul(input-t, torch.exp(-s))
# output = x0 + torch.mul(1-self.mask, tmp)
# log_det = -s.sum(-1)
# else:
# # FROM X TO Z
# tmp = torch.mul(input, torch.exp(s)) + t
# output = x0 + torch.mul(1-self.mask, tmp)
# log_det = s.sum(-1)
# return output, log_det
# class Net(nn.Module):
# def __init__(self, N, input_dim, hidden_dim, device):
# super(Net, self).__init__()
# self.n = 4
# self.device = device
# mask_checkerboard = np.indices((1, input_dim)).sum(axis=0)%2
# mask_checkerboard = np.append(mask_checkerboard,1 - mask_checkerboard,axis=0)
# mask_checkerboard = np.append(mask_checkerboard, mask_checkerboard, axis=0)
# mask_checkerboard = np.append(mask_checkerboard, 1 - mask_checkerboard, axis=0)
# # print("input dim", N, input_dim, mask_checkerboard)
# self.masks = torch.Tensor(mask_checkerboard).to(self.device)
# self.layers = nn.ModuleList([AffineCouplingLayer(input_dim=input_dim, hidden_dim=hidden_dim, mask=self.masks[i]) for i in range(self.n)])
# def forward(self, input, reverse=False):
# # stack 3 layers with alternating checkboard pattern.
# log_det_loss = torch.zeros(input.size()[0]).to(self.device)
# z = input
# index_range = range(self.n) if not reverse else range(self.n-1, -1 , -1)
# # print(z, reverse)
# for idx in index_range:
# z, log_det = self.layers[idx](z, reverse)
# log_det_loss += log_det
# return z, log_det_loss
import numpy as np
import matplotlib.pyplot as plt
from pylab import rcParams
rcParams['figure.figsize'] = 10, 8
rcParams['figure.dpi'] = 300
import torch
from torch import nn
from torch import distributions
from torch.nn.parameter import Parameter
from sklearn import cluster, datasets, mixture
from sklearn.preprocessing import StandardScaler
class RealNVP(nn.Module):
def __init__(self, nets, nett, masks, prior):
super(RealNVP, self).__init__()
self.prior = prior
self.mask = nn.Parameter(masks, requires_grad=False)
self.t = torch.nn.ModuleList([nett() for _ in range(len(masks))])
self.s = torch.nn.ModuleList([nets() for _ in range(len(masks))])
    def g(self, z):
        # generative direction: map a latent sample z back to data space x
        x = z
        for i in range(len(self.t)):
            x_ = x*self.mask[i]
            s = self.s[i](x_)*(1 - self.mask[i])
            t = self.t[i](x_)*(1 - self.mask[i])
            x = x_ + (1 - self.mask[i]) * (x * torch.exp(s) + t)
        return x
    def f(self, x):
        # inference direction: map data x to latent z, accumulating the
        # log-determinant of the Jacobian needed for log_prob
        log_det_J, z = x.new_zeros(x.shape[0]), x
        for i in reversed(range(len(self.t))):
            z_ = self.mask[i] * z
            s = self.s[i](z_) * (1-self.mask[i])
            t = self.t[i](z_) * (1-self.mask[i])
            z = (1 - self.mask[i]) * (z - t) * torch.exp(-s) + z_
            log_det_J -= s.sum(dim=1)
        return z, log_det_J
def log_prob(self,x):
z, logp = self.f(x)
return self.prior.log_prob(z) + logp
def sample(self, batchSize):
z = self.prior.sample((batchSize, 1))
logp = self.prior.log_prob(z)
x = self.g(z)
return x | python | 16 | 0.554237 | 147 | 35.643478 | 115 | starcoderdata |
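# --- Hedged usage sketch (an addition, not part of the original module) -----
# A minimal training loop for the RealNVP class above. The network sizes,
# mask pattern, prior and the make_moons data are illustrative assumptions;
# everything it calls (torch, nn, distributions, sklearn.datasets, np) is
# already imported at the top of this file.
def _demo_train():
    nets = lambda: nn.Sequential(nn.Linear(2, 256), nn.LeakyReLU(),
                                 nn.Linear(256, 256), nn.LeakyReLU(),
                                 nn.Linear(256, 2), nn.Tanh())
    nett = lambda: nn.Sequential(nn.Linear(2, 256), nn.LeakyReLU(),
                                 nn.Linear(256, 256), nn.LeakyReLU(),
                                 nn.Linear(256, 2))
    masks = torch.from_numpy(np.array([[0, 1], [1, 0]] * 3).astype(np.float32))
    prior = distributions.MultivariateNormal(torch.zeros(2), torch.eye(2))
    flow = RealNVP(nets, nett, masks, prior)
    optimizer = torch.optim.Adam([p for p in flow.parameters() if p.requires_grad], lr=1e-4)
    for step in range(2001):
        x, _ = datasets.make_moons(n_samples=128, noise=0.05)
        loss = -flow.log_prob(torch.from_numpy(x.astype(np.float32))).mean()
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        if step % 500 == 0:
            print(f"step {step}: nll = {loss.item():.3f}")
    return flow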
// File origin: VS1LAB A2
/**
* A class to help using the HTML5 Geolocation API.
*/
// eslint-disable-next-line no-unused-vars
class LocationHelper {
// Location values for latitude and longitude are private properties to protect them from changes.
#latitude = '';
/**
     * Getter method allows read access to the private location property.
*/
get latitude() {
return this.#latitude;
}
#longitude = '';
get longitude() {
return this.#longitude;
}
/**
* Create LocationHelper instance if coordinates are known.
* @param {string} latitude
* @param {string} longitude
*/
constructor(latitude, longitude) {
this.#latitude = (parseFloat(latitude)).toFixed(5);
this.#longitude = (parseFloat(longitude)).toFixed(5);
}
/**
* The 'findLocation' method requests the current location details through the geolocation API.
* It is a static method that should be used to obtain an instance of LocationHelper.
* Throws an exception if the geolocation API is not available.
* @param {*} callback a function that will be called with a LocationHelper instance as parameter, that has the current location details
*/
static findLocation(callback) {
const geoLocationApi = navigator.geolocation
if (!geoLocationApi) {
throw new Error("The GeoLocation API is unavailable.");
}
// Call to the HTML5 geolocation API.
// Takes a first callback function as argument that is called in case of success.
// Second callback is optional for handling errors.
// These callbacks are given as arrow function expressions.
geoLocationApi.getCurrentPosition((location) => {
// Create and initialize LocationHelper object.
let helper = new LocationHelper(location.coords.latitude, location.coords.longitude);
// Pass the locationHelper object to the callback.
callback(helper);
}, (error) => {
alert(error.message)
});
}
} | javascript | 16 | 0.647285 | 140 | 34.271186 | 59 | starcoderdata |
#!/usr/bin/env python
def dct_aliasses():
return {
0 : 0,
'reset' : 0,
'bold' : 1,
'faint' : 2,
'italic' : 3,
'uline' : 4,
'sblink' : 5,
'fblink' : 6,
'inv' : 7,
'hide' : 8,
'strike' : 9,
'font_A' : 10,
'font_B' : 11,
'font_C' : 12,
'font_D' : 13,
'font_E' : 14,
'font_F' : 15,
'font_G' : 16,
'font_H' : 17,
'font_I' : 18,
'font_J' : 19,
#20
'd_uline' : 21,
#22
#23
'no_uline' : 24,
'no_blink' : 25,
#26
#27
#28
'no_strike' : 29,
'fg_black' : 30,
'fg_red' : 31,
'fg_green' : 32,
'fg_yellow' : 33,
'fg_blue' : 34,
'fg_magenta' : 35,
'fg_cyan' : 36,
'fg_white' : 37,
'fg_rgb' : 38,
'bg_black' : 40,
'bg_red' : 41,
'bg_green' : 42,
'bg_yellow' : 43,
'bg_blue' : 44,
'bg_magenta' : 45,
'bg_cyan' : 46,
'bg_white' : 47,
'bg_rgb' : 48,
'black' : 30,
'red' : 31,
'green' : 32,
'yellow' : 33,
'blue' : 34,
'magenta' : 35,
'cyan' : 36,
'white' : 37,
'rgb' : 38,
'bblack' : 40,
'bred' : 41,
'bgreen' : 42,
'byellow' : 43,
'bblue' : 44,
'bmagenta' : 45,
'bcyan' : 46,
'bwhite' : 47,
}
def str_seq(style):
def mkup(m):
dct_markup=dct_aliasses()
return dct_markup.get(m)
lst_seq=[str(mkup(s)) for s in style]
str_seq=str(';').join(lst_seq)
return str_seq | python | 10 | 0.390123 | 38 | 19.012346 | 81 | starcoderdata |
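# --- Hedged usage example (an assumption, not part of the original script) --
# str_seq() only builds the numeric part of an SGR escape sequence, so the
# caller still wraps it in "\033[...m"; the printed text below is arbitrary.
if __name__ == '__main__':
    seq = str_seq(['bold', 'fg_red'])          # -> "1;31"
    print('\033[' + seq + 'm' + 'error: something failed' + '\033[0m')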
func main() {
// configuration
cfgr := nsqm.Local()
// nsq producer for sending requests
producer, err := nsqm.NewProducer(cfgr)
if err != nil {
log.Fatal(err)
}
// rpc client: sends requests, waits and accepts responses
// provides interface for application
rpcClient := rpc.NewClient(producer, reqTopic, rspTopic)
// create consumer arround rpcClient
consumer, err := nsqm.NewConsumer(cfgr, rspTopic, channel, rpcClient)
if err != nil {
log.Fatal(err)
}
// application client
client := &client{t: rpcClient}
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
// clean exit
defer producer.Stop() // 3. stop producing new requests
defer cancel() // 2. cancel any pending (waiting for responses)
defer consumer.Stop() // 1. stop listening for responses
x := 2
y := 3
z, err := client.Add(ctx, x, y)
if err != nil {
log.Fatal(err)
}
fmt.Printf("%d + %d = %d\n", x, y, z)
} | go | 10 | 0.663147 | 71 | 24.621622 | 37 | inline |
package de.mpicbg.scf.volumemanager.core;
import de.mpicbg.scf.imgtools.geometry.create.Thresholding;
import ij.ImagePlus;
import ij.gui.PolygonRoi;
import ij.gui.Roi;
import ij.gui.ShapeRoi;
import ij.process.FloatPolygon;
import java.awt.*;
import net.imglib2.Interval;
import net.imglib2.RandomAccess;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.RealRandomAccessibleRealInterval;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.img.display.imagej.ImageJFunctions;
import net.imglib2.roi.IterableRegion;
import net.imglib2.roi.Regions;
import net.imglib2.type.BooleanType;
import net.imglib2.type.logic.BoolType;
import net.imglib2.type.numeric.integer.UnsignedByteType;
import net.imglib2.util.Intervals;
import net.imglib2.view.Views;
/**
 * This class contains convenience functions for ROI handling in ImageJ1/ImageJ2/Imglib2.
*
* Author: Scientific Computing Facility, MPI-CBG Dresden,
* Date: June 2016
*
* Copyright 2017 Max Planck Institute of Molecular Cell Biology and Genetics,
* Dresden, Germany
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
public class RoiUtilities {
public static Roi fixRoi(Roi roi) {
if (roi == null) {
return null;
}
if (roi.getType() == Roi.POLYLINE) {
return roi;
}
Roi[] roiArr = new ShapeRoi(roi).getRois();
if (roiArr.length == 1) {
if (roiArr[0].getBounds().x != roi.getBounds().x || roiArr[0].getBounds().y != roi.getBounds().y) {
FloatPolygon fp = roiArr[0].getFloatPolygon();
for (int i = 0; i < fp.npoints; i++) {
fp.xpoints[i] += roi.getBounds().x;
fp.ypoints[i] += roi.getBounds().y;
}
return new PolygonRoi(fp, Roi.POLYGON);
}
return roiArr[0];
}
if (roiArr.length == 0) {
return roi;
}
FloatPolygon firstPolygon = roiArr[0].getFloatPolygon();
for (int i = 1; i < roiArr.length; i++) {
FloatPolygon secondPolygon = roiArr[i].getFloatPolygon();
double minimumSquaredDistance = Double.MAX_VALUE;
int firstShortestIndex = 0;
int secondShortestIndex = 0;
// find closest points
for (int firstCount = 0; firstCount < firstPolygon.npoints; firstCount++) {
for (int secondCount = 0; secondCount < secondPolygon.npoints; secondCount++) {
double distance = Math.pow(firstPolygon.xpoints[firstCount] - secondPolygon.xpoints[secondCount], 2) +
Math.pow(firstPolygon.ypoints[firstCount] - secondPolygon.ypoints[secondCount], 2);
if (distance < minimumSquaredDistance) {
minimumSquaredDistance = distance;
firstShortestIndex = firstCount;
secondShortestIndex = secondCount;
}
}
}
FloatPolygon summedPolygon = new FloatPolygon();
for (int j = 0; j <= firstShortestIndex; j++) {
summedPolygon.addPoint(firstPolygon.xpoints[j], firstPolygon.ypoints[j]);
}
for (int j = secondShortestIndex; j < secondPolygon.npoints; j++) {
summedPolygon.addPoint(secondPolygon.xpoints[j], secondPolygon.ypoints[j]);
}
for (int j = 0; j <= secondShortestIndex; j++) {
summedPolygon.addPoint(secondPolygon.xpoints[j], secondPolygon.ypoints[j]);
}
for (int j = firstShortestIndex; j < firstPolygon.npoints; j++) {
summedPolygon.addPoint(firstPolygon.xpoints[j], firstPolygon.ypoints[j]);
}
firstPolygon = summedPolygon;
}
PolygonRoi pr = new PolygonRoi(firstPolygon, Roi.POLYGON);
if (pr.getBounds().x != roi.getBounds().x || pr.getBounds().y != roi.getBounds().y) {
pr.getBounds().x = roi.getBounds().x;
pr.getBounds().y = roi.getBounds().y;
}
return pr;
}
public static boolean rectanglesEqual(Rectangle r1, Rectangle r2) {
return
r1.x == r2.x &&
r1.y == r2.y &&
r1.width == r2.width &&
r1.height == r2.height;
}
public static RandomAccessibleInterval raster(RealRandomAccessibleRealInterval cr) {
int n = cr.numDimensions();
long[] minmax = new long[n * 2];
for (int d = 0; d < n; d++) {
minmax[d] = (long) Math.floor(cr.realMin(d));
minmax[d + n] = (long) Math.ceil(cr.realMax(d));
}
Interval interval = Intervals.createMinMax(minmax);
return Views.interval(Views.raster(cr), interval);
}
    public static <B extends BooleanType<B>> Roi getRoiFromRAISlice(RandomAccessibleInterval<B> lr, Interval interval) {
IterableRegion iterable = Regions.iterable(lr);
net.imglib2.Cursor cursor = iterable.cursor();
Img img = ArrayImgs.unsignedBytes(new long[]{interval.max(0), interval.max(1)});
RandomAccess ira = img.randomAccess();
long[] position3 = new long[3];
long[] position2 = new long[2];
int countPixels = 0;
while (cursor.hasNext()) {
cursor.next();
cursor.localize(position3);
if (position3[0] >= interval.min(0) &&
position3[1] >= interval.min(1) &&
position3[2] >= interval.min(2) &&
position3[0] <= interval.max(0) &&
position3[1] <= interval.max(1) &&
position3[2] <= interval.max(2)) {
position2[0] = position3[0];
position2[1] = position3[1];
ira.setPosition(position3);
ira.get().set((byte) 255);
countPixels++;
}
}
if (countPixels > 0) {
ImagePlus maskImage = ImageJFunctions.wrapUnsignedByte(img, "");
Roi roi = Thresholding.applyThreshold(maskImage, 128, 256);
return roi;
} else {
return null;
}
}
} | java | 19 | 0.614196 | 122 | 36.658654 | 208 | starcoderdata |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Text;
using UnityEngine;
using XUnity.AutoTranslator.Plugin.Core.Configuration;
using XUnity.AutoTranslator.Plugin.Core.Constants;
using XUnity.AutoTranslator.Plugin.Core.Text;
using XUnity.AutoTranslator.Plugin.Core.Utilities;
using XUnity.Common.Constants;
using XUnity.Common.Logging;
using XUnity.Common.Utilities;
namespace XUnity.AutoTranslator.Plugin.Core.Extensions
{
internal static class TextComponentExtensions
{
private static readonly string SupportRichTextPropertyName = "supportRichText";
private static readonly string RichTextPropertyName = "richText";
private static readonly Dictionary<Type, ITextComponentManipulator> _manipulators = new Dictionary<Type, ITextComponentManipulator>();
private static ITextComponentManipulator GetTextManipulator( object ui )
{
var type = ui.GetType();
if( type == null )
{
return null;
}
if( !_manipulators.TryGetValue( type, out var manipulator ) )
{
if( type == ClrTypes.TextField )
{
manipulator = new FairyGUITextComponentManipulator();
}
else if( type == ClrTypes.TextArea2D )
{
manipulator = new TextArea2DComponentManipulator();
}
else
{
manipulator = new DefaultTextComponentManipulator( type );
}
_manipulators[ type ] = manipulator;
}
return manipulator;
}
public static bool IsComponentActive( this object ui )
{
if( ui is Component component )
{
var go = component.gameObject;
if( go )
{
if( component is Behaviour be )
{
return go.activeInHierarchy && be.enabled;
}
else
{
return go.activeInHierarchy;
}
}
}
return true;
}
public static bool IsKnownTextType( this object ui )
{
if( ui == null ) return false;
var type = ui.GetType();
return ( Settings.EnableIMGUI && ui is GUIContent )
|| ( Settings.EnableUGUI && ClrTypes.Text != null && ClrTypes.Text.IsAssignableFrom( type ) )
|| ( Settings.EnableNGUI && ClrTypes.UILabel != null && ClrTypes.UILabel.IsAssignableFrom( type ) )
|| ( Settings.EnableTextMesh && ClrTypes.TextMesh != null && ClrTypes.TextMesh.IsAssignableFrom( type ) )
|| ( Settings.EnableFairyGUI && ClrTypes.TextField != null && ClrTypes.TextField.IsAssignableFrom( type ) )
|| ( Settings.EnableTextMeshPro && IsKnownTextMeshProType( type ) );
}
public static bool IsKnownTextMeshProType( Type type )
{
if( ClrTypes.TMP_Text != null )
{
return ClrTypes.TMP_Text.IsAssignableFrom( type );
}
else
{
return ClrTypes.TextMeshProUGUI?.IsAssignableFrom( type ) == true
|| ClrTypes.TextMeshPro?.IsAssignableFrom( type ) == true;
}
}
public static bool SupportsRichText( this object ui )
{
if( ui == null ) return false;
var type = ui.GetType();
return ( ClrTypes.Text != null && ClrTypes.Text.IsAssignableFrom( type ) && Equals( type.CachedProperty( SupportRichTextPropertyName )?.Get( ui ), true ) )
|| ( ClrTypes.TextMesh != null && ClrTypes.TextMesh.IsAssignableFrom( type ) && Equals( type.CachedProperty( RichTextPropertyName )?.Get( ui ), true ) )
|| DoesTextMeshProSupportRichText( ui, type )
|| ( ClrTypes.UguiNovelText != null && ClrTypes.UguiNovelText.IsAssignableFrom( type ) )
|| ( ClrTypes.TextField != null && ClrTypes.TextField.IsAssignableFrom( type ) );
}
public static bool DoesTextMeshProSupportRichText( object ui, Type type )
{
if( ClrTypes.TMP_Text != null )
{
return ClrTypes.TMP_Text.IsAssignableFrom( type ) && Equals( type.CachedProperty( RichTextPropertyName )?.Get( ui ), true );
}
else
{
return ( ClrTypes.TextMeshPro?.IsAssignableFrom( type ) == true && Equals( type.CachedProperty( RichTextPropertyName )?.Get( ui ), true ) )
|| ( ClrTypes.TextMeshProUGUI?.IsAssignableFrom( type ) == true && Equals( type.CachedProperty( RichTextPropertyName )?.Get( ui ), true ) );
}
}
public static bool SupportsStabilization( this object ui )
{
if( ui == null ) return false;
return !( ui is GUIContent );
}
public static bool IsSpammingComponent( this object ui )
{
return ui == null || ui is GUIContent;
}
public static bool SupportsLineParser( this object ui )
{
return Settings.GameLogTextPaths.Count > 0 && ui is Component comp && Settings.GameLogTextPaths.Contains( comp.gameObject.GetPath() );
}
public static bool IsNGUI( this object ui )
{
if( ui == null ) return false;
var type = ui.GetType();
return ClrTypes.UILabel != null && ClrTypes.UILabel.IsAssignableFrom( type );
}
public static string GetText( this object ui )
{
if( ui == null ) return null;
string text = null;
TextGetterCompatModeHelper.IsGettingText = true;
try
{
if( ui is GUIContent )
{
text = ( (GUIContent)ui ).text;
}
else
{
// fallback to reflective approach
return GetTextManipulator( ui )?.GetText( ui );
}
}
finally
{
TextGetterCompatModeHelper.IsGettingText = false;
}
return text ?? string.Empty;
}
public static void SetText( this object ui, string text )
{
if( ui == null ) return;
if( ui is GUIContent gui )
{
gui.text = text;
}
else
{
// fallback to reflective approach
GetTextManipulator( ui )?.SetText( ui, text );
}
}
}
} | c# | 22 | 0.580133 | 164 | 31.908629 | 197 | starcoderdata |
export const RESET_QUIZ_SCORE = 'RESET_QUIZ_SCORE';
export const SET_QUIZ_SCORE = 'SET_QUIZ_SCORE';
export const UPDATE_SCORE = 'UPDATE_SCORE';
export function updateQuizScore(quizScore) {
return {
type: UPDATE_SCORE,
quizScore: {
score: quizScore.score,
},
};
}
export function setQuizScore(quizScore) {
return {
type: SET_QUIZ_SCORE,
quizScore: {
score: quizScore.score,
quizSize: quizScore.quizSize,
showQuizScoreDialog: true,
},
};
}
export function resetQuizScore() {
return {
type: RESET_QUIZ_SCORE,
quizScore: {
score: 0,
quizSize: 0,
showQuizScoreDialog: false,
},
};
} | javascript | 10 | 0.637444 | 51 | 18.794118 | 34 | starcoderdata |
<?php
function addBook(){
if (empty($_POST['adminID'])){
exit("
}
if (empty($_POST['bookNameUp'])){
echo "书名不能为空";
return;
}
if (empty($_POST['bookAuthorUp'])){
echo "作者不能为空";
return;
}
if (empty($_POST['bookCategoryUp'])){
echo "请先择类别";
return;
}
if (empty($_POST['bookPressUp'])){
echo "请输入出版社";
return;
}
if (empty($_POST['bookPublishedDateUp'])){
echo "请先择出版时间";
return;
}
if (empty($_POST['bookIntroductionUp'])){
echo "简介不能为空";
return;
}
$bookName=$_POST['bookNameUp'];
$bookAuthor=$_POST['bookAuthorUp'];
$bookCategory=$_POST['bookCategoryUp'];
$bookPress=$_POST['bookPressUp'];
$bookPublishedDate=$_POST['bookPublishedDateUp'];
$bookIntroduction=$_POST['bookIntroductionUp'];
if ($bookCategory==="网页开发"){
$bookCategory=1;
}
if ($bookCategory==="语言编程"){
$bookCategory=2;
}
if ($bookCategory==="操作系统"){
$bookCategory=3;
}
if ($bookCategory==="数据库"){
$bookCategory=4;
}
if (empty($_FILES['bookCover'])){
echo "请先择封面";
return;
}
if (empty($_FILES['bookResource'])){
echo "请先择文件";
return;
}
$bookCover=$_FILES['bookCover'];
$bookResource=$_FILES['bookResource'];
$bookCoverUrl="../../resources/book/bookCover/".$bookCover['name'];
$bookResourceUrl="../../resources/book/bookDownload/".$bookResource['name'];
if (!move_uploaded_file($bookCover['tmp_name'], $bookCoverUrl)) {
echo '上传封面失败';
return;
}
if (!move_uploaded_file($bookResource['tmp_name'], $bookResourceUrl)) {
echo '上传文件失败';
return;
}
$bookCoverUrl=substr($bookCoverUrl,4);
$bookResourceUrl=substr($bookResourceUrl,4);
date_default_timezone_set("Asia/Shanghai");
$addTime=date("Y-m-d H:i:s");
$connection = mysqli_connect('127.0.0.1', 'root', '147199512', 'digital_library');
if (!$connection) {
echo "数据库链接错误";
return;
}
$query=mysqli_query($connection,"INSERT INTO booklist (
booklist.bookName,
booklist.bookAuthor,
booklist.bookIntroduction,
booklist.bookScore,
booklist.bookCategory,
booklist.bookCover,
booklist.bookUpload,
booklist.bookDownload,
booklist.bookPress,
booklist.bookPublishedDate,
booklist.bookVisits,
booklist.bookBorrow)
VALUES (
'{$bookName}',
'{$bookAuthor}',
'{$bookIntroduction}',
3.5,
'{$bookCategory}',
'{$bookCoverUrl}',
'{$addTime}',
'{$bookResourceUrl}',
'{$bookPress}',
'{$bookPublishedDate}',0,0)");
if (!$query){
echo "上传失败";
return;
}
mysqli_close($connection);
echo "上传成功";
return;
}
if ($_SERVER['REQUEST_METHOD'] === 'POST') {
addBook();
} | php | 11 | 0.545122 | 86 | 24.449153 | 118 | starcoderdata |
namespace CGAL {
/*!
\ingroup PkgMesh3MeshClasses
The class `Mesh_criteria_3` is a model of both concepts `MeshCriteria_3`
and `MeshCriteriaWithFeatures_3`.
It gathers the refinement criteria for mesh tetrahedra and
surface facets where
surface facets are facets in the mesh approximating the domain surface patches.
In addition, for domain with exposed 1-dimensional features,
the class `Mesh_criteria_3`
handles the definition of a sizing field to guide the discretization of
1-dimensional features.
\tparam Tr has to be instantiated with the type used for
`C3T3::Triangulation`,
where `C3T3` is the model of `MeshComplex_3InTriangulation_3`
used in the mesh generation process,
and `C3T3::Triangulation` its nested triangulation type.
\cgalModels `MeshCriteria_3`
\cgalHeading{Example}
\code{.cpp}
// Create a Mesh_criteria_3 object with all cell and facet parameters set
Mesh_criteria_3 criteria (parameters::facet_angle=30,
parameters::facet_size=1,
parameters::facet_distance=0.1,
parameters::cell_radius_edge_ratio=2,
parameters::cell_size=1.5);
// Create a Mesh_criteria_3 object with size ignored (note that the order changed)
Mesh_criteria_3 criteria (parameters::cell_radius_edge_ratio=2,
parameters::facet_angle=30,
parameters::facet_distance=0.1);
\endcode
\sa `MeshCriteria_3`
\sa `MeshCriteriaWithFeatures_3`
\sa `MeshCellCriteria_3`
\sa `MeshEdgeCriteria_3`
\sa `MeshFacetCriteria_3`
\sa `MeshDomainField_3`
\sa `CGAL::Mesh_cell_criteria_3<Tr>`
\sa `CGAL::Mesh_edge_criteria_3<Tr>`
\sa `CGAL::Mesh_facet_criteria_3<Tr>`
\sa `CGAL::Mesh_facet_topology`
*/
template< typename Tr >
class Mesh_criteria_3
{
public:
/// \name Types
/// @{
/*!
The criteria for edges.
*/
typedef Mesh_edge_criteria_3<Tr> Edge_criteria;
/*!
The criteria for facets.
*/
typedef Mesh_facet_criteria_3<Tr> Facet_criteria;
/*!
The
criteria for cells.
*/
typedef Mesh_cell_criteria_3<Tr> Cell_criteria;
/// @}
/// \name Creation
/// @{
/*!
Construction from facet and cell criteria. The edge criteria are ignored
in this case.
*/
Mesh_criteria_3(const Facet_criteria& facet_criteria,
const Cell_criteria& cell_criteria);
/*!
Construction from edge, facet and cell criteria.
*/
Mesh_criteria_3(const Edge_criteria& edge_criteria,
const Facet_criteria& facet_criteria,
const Cell_criteria& cell_criteria);
/*!
\brief Construction from criteria parameters. This constructor uses named
parameters for convenient criteria
construction.
\tparam FT must be a model of `Field`
\tparam Fieldi (`i`=1,..,4) should be either a model
of the concept `Field` or a model of the concept `MeshDomainField_3`
The parameters are named parameters and can be passed in any order
provided their name is given (see example below). The name of each
parameter is the one that is written in the description of the
function (e.g. `parameters::facet_size`).
The description of each parameter is as follows:
- `edge_size`: a scalar field (resp. a constant) providing a space varying
(resp. a uniform)
upper bound for the lengths of curve edges. This parameter has to be set to a positive
value when 1-dimensional features protection is used.
- `facet_angle`: a lower bound for the angles (in degrees) of the
surface mesh facets.
- `facet_size`: a scalar field (resp. a constant) describing
a space varying (resp. a uniform) upper bound for the radii of the surface Delaunay balls.
- `facet_distance`: a scalar field (resp. a constant) describing a space varying (resp. a uniform)
upper bound for the distance between the facet circumcenter and the center of its surface
Delaunay ball.
- `facet_topology`: the set of topological constraints
which have to be verified by each surface facet. The default value is
`CGAL::FACET_VERTICES_ON_SURFACE`. See `Mesh_facet_topology` manual page to
get all possible values.
- `cell_radius_edge_ratio`: an upper bound for the radius-edge ratio of the mesh tetrahedra.
- `cell_size`: a scalar field (resp. a constant) describing
a space varying (resp. a uniform) upper-bound for the circumradii of the mesh tetrahedra.
Note that each size or distance parameter can be specified in two ways: either as
a scalar field or as a numerical value when the field is uniform.
Each parameter has a special default value `ignored` which means that the
corresponding criterion will be ignored.
Numerical sizing or distance values, as well as scalar fields
should be given in the unit used for coordinates of points in the mesh domain class
of the mesh generation process.
*/
template<typename FT,
typename ...Fieldi>
Mesh_criteria_3(Field1 parameters::edge_size = ignored,
FT parameters::facet_angle = ignored,
Field2 parameters::facet_size = ignored,
Field3 parameters::facet_distance = ignored,
Mesh_facet_topology parameters::facet_topology = CGAL::FACET_VERTICES_ON_SURFACE,
FT parameters::cell_radius_edge_ratio = ignored,
Field4 parameters::cell_size = ignored);
/// @}
}; /* end Mesh_criteria_3 */
} /* end namespace CGAL */ | c | 11 | 0.715493 | 98 | 32.074534 | 161 | starcoderdata |
static CYTHON_INLINE double __pyx_f_7skimage_7_shared_13interpolation_quadratic_interpolation(double __pyx_v_x, double *__pyx_v_f) {
double __pyx_r;
/* "skimage/_shared/interpolation.pxd":116
* """
* return (x * f[2] * (x - 1)) / 2 - \
* x * f[1] * (x - 2) + \ # <<<<<<<<<<<<<<
* (f[0] * (x - 1) * (x - 2)) / 2
*
*/
__pyx_r = (((((__pyx_v_x * (__pyx_v_f[2])) * (__pyx_v_x - 1.0)) / 2.0) - ((__pyx_v_x * (__pyx_v_f[1])) * (__pyx_v_x - 2.0))) + ((((__pyx_v_f[0]) * (__pyx_v_x - 1.0)) * (__pyx_v_x - 2.0)) / 2.0));
goto __pyx_L0;
/* "skimage/_shared/interpolation.pxd":97
*
*
* cdef inline double quadratic_interpolation(double x, double[3] f) nogil: # <<<<<<<<<<<<<<
* """WARNING: Do not use, not implemented correctly.
*
*/
/* function exit code */
__pyx_L0:;
return __pyx_r;
} | c | 19 | 0.451869 | 197 | 34.36 | 25 | inline |
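# --- Hedged numeric check (an addition, in Python, not part of the generated C) --
# The Cython docstring above warns that the helper should not be used as-is, but
# the quoted polynomial does reproduce its samples at x = 0, 1, 2, i.e. it is the
# Lagrange interpolant through (0, f[0]), (1, f[1]), (2, f[2]):
def quadratic_interpolation(x, f):
    return (x * f[2] * (x - 1)) / 2 - x * f[1] * (x - 2) + (f[0] * (x - 1) * (x - 2)) / 2

f = [3.0, 5.0, 4.0]                                          # arbitrary sample values
print([quadratic_interpolation(x, f) for x in (0, 1, 2)])    # [3.0, 5.0, 4.0]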
int main(int argc, char *argv[])
{
//create data folder if doesn't exist
std::filesystem::create_directory("data");
std::filesystem::create_directory("data/3DGen");
if (argc==1)
{
UI::interactive_mode();
}
else
{
UI::argument_mode(argc, argv);
}
UI::print_ln("Katana Exited.");
return 0;
} | c++ | 9 | 0.567335 | 52 | 20.875 | 16 | inline |
#!/usr/bin/python
# -*- coding:utf-8 -*-
from common import *
import sys
cmd = """%s/bin/java
-Xmx%dm
-ea
-Dsocialite.master=%s
-Dsocialite.worker.num=%d
-Dlog4j.configuration=file:%s
-Dsocialite.worker.num_threads=4
-Dsocialite.output.dir=%s
-classpath %s
socialite.dist.worker.WorkerNode 2>&1 | tee %s""" % (
JAVA_HOME,
HEAP_SIZE, MASTER_HOSTNAME, WORKER_NUM, SOCIALITE_PREFIX + '/conf/log4j.properties',
SOCIALITE_PREFIX + '/gen', class_path, SOCIALITE_PREFIX + "/logs/worker.log")
cmd = cmd.replace('\n', '')
if len(sys.argv) != 2:
raise IOError("[usage: %s install/update/run]" % sys.argv[0])
if sys.argv[1] == 'install':
os.system('tar -zcf /tmp/out_$USER.tar.gz -C %s out ext conf' % SOCIALITE_PREFIX)
for worker_hostname in WORKER_HOSTNAME_LIST:
if worker_hostname != MASTER_HOSTNAME:
os.system('scp /tmp/out_$USER.tar.gz %s:/tmp' % worker_hostname)
os.system('ssh -n %s "mkdir %s 2> /dev/null"' % (worker_hostname, SOCIALITE_PREFIX))
os.system('ssh -n %s "mkdir %s/logs 2> /dev/null"' % (worker_hostname, SOCIALITE_PREFIX))
os.system('ssh -n %s "tar zxf /tmp/out_$USER.tar.gz -C %s"' % (worker_hostname, SOCIALITE_PREFIX))
elif sys.argv[1] == 'update':
os.system('tar -zcf /tmp/out_$USER.tar.gz -C %s out' % SOCIALITE_PREFIX)
for worker_hostname in WORKER_HOSTNAME_LIST:
if worker_hostname != MASTER_HOSTNAME:
os.system('scp /tmp/out_$USER.tar.gz %s:/tmp' % worker_hostname)
os.system('ssh -n %s "tar zxf /tmp/out_$USER.tar.gz -C %s"' % (worker_hostname, SOCIALITE_PREFIX))
elif sys.argv[1] == 'run':
for worker_hostname in WORKER_HOSTNAME_LIST:
os.system('ssh -n %s "pkill -f WorkerNode"''' % worker_hostname)
os.system('ssh -f -n %s "%s"' % (worker_hostname, cmd)) | python | 13 | 0.633244 | 110 | 44.487805 | 41 | starcoderdata |
pub fn set_block_at(&mut self, x: usize, y: usize, z: usize, block: BlockId) -> Option<()> {
let old_block = self.block_at(x, y, z)?;
let section = self.section_for_y_mut(y)?;
let result = match section {
Some(section) => {
let result = section.set_block_at(x, y % SECTION_HEIGHT, z, block);
// If the block update caused the section to contain only
// air, free it to conserve memory.
if section.is_empty() {
self.clear_section(y);
}
result
}
None => {
if !block.is_air() {
let mut section = ChunkSection::default();
let result = section.set_block_at(x, y % SECTION_HEIGHT, z, block);
self.set_section_at((y / SECTION_HEIGHT) as isize, Some(section));
result
} else {
Some(())
}
}
};
self.heightmaps
.update(x, y, z, old_block, block, Self::block_at_fn(&self.sections));
result
} | rust | 17 | 0.453287 | 92 | 40.321429 | 28 | inline |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// XLA-specific base classes for Unary and Binary Ops.
#include "tensorflow/compiler/tf2xla/kernels/cwise_ops.h"
#include "tensorflow/compiler/tf2xla/lib/broadcast.h"
#include "tensorflow/compiler/tf2xla/type_util.h"
#include "tensorflow/compiler/tf2xla/xla_helpers.h"
#include "tensorflow/compiler/tf2xla/xla_op_kernel.h"
#include "tensorflow/compiler/tf2xla/xla_op_registry.h"
#include "tensorflow/compiler/xla/client/client_library.h"
#include "tensorflow/compiler/xla/client/lib/constants.h"
#include "tensorflow/compiler/xla/client/xla_builder.h"
#include "tensorflow/compiler/xla/shape.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/tensor_shape.h"
#include "tensorflow/core/framework/types.h"
#include "tensorflow/core/util/bcast.h"
namespace tensorflow {
void XlaBinaryOp::Compile(XlaOpKernelContext* ctx) {
TensorShape lhs_shape = ctx->InputShape(0);
TensorShape rhs_shape = ctx->InputShape(1);
xla::Shape lhs_xla_shape = ctx->InputXlaShape(0).ValueOrDie();
xla::Shape rhs_xla_shape = ctx->InputXlaShape(1).ValueOrDie();
// Fetch the expressions containing the input tensors.
auto lhs_handle = ctx->Input(0);
auto rhs_handle = ctx->Input(1);
if (lhs_shape.dims() == rhs_shape.dims()) {
auto reconcile_tensor_mismatched_dims =
[ctx](xla::XlaOp op, const xla::Shape& lhs_xla_shape,
const xla::Shape& rhs_xla_shape, TensorShape* lhs_tensor_shape) {
// Find out mismatched dimensions that are non-broadcastable.
// Reconcile the
// difference by slicing the bigger dimension.
for (int64_t i = 0; i < lhs_xla_shape.rank(); ++i) {
if (lhs_xla_shape.is_dynamic_dimension(i)) {
if (!rhs_xla_shape.is_dynamic_dimension(i) &&
lhs_xla_shape.dimensions(i) > rhs_xla_shape.dimensions(i) &&
rhs_xla_shape.dimensions(i) != 1) {
// e.g., :
// lhs = [..., <=N, ...]
// rhs = [..., 2 , ...]
// Slice N into 2.
// Size 1 dim doesn't need slice as the other side is
// broadcastable.
auto size = xla::GetDimensionSize(op, i);
op = xla::SliceInDim(op, 0, rhs_xla_shape.dimensions(i), 1,
/*dimno=*/i);
lhs_tensor_shape->set_dim(i, rhs_xla_shape.dimensions(i));
// Propagate dynamic dimension.
op = xla::SetDimensionSize(op, size, i);
}
if (rhs_xla_shape.is_dynamic_dimension(i) &&
lhs_xla_shape.dimensions(i) < rhs_xla_shape.dimensions(i) &&
rhs_xla_shape.dimensions(i) != 1 &&
lhs_xla_shape.dimensions(i) != 1) {
// e.g., :
// lhs = [..., <=M, ...]
// rhs = [..., <=N , ...]
// where M < N
//
// In this case we pad M into N to make the bounds the same.
// Note that we can't slice N into M because M could be a
// dynamic size 1 dim that's meant to be broadcasted to N.
auto size = xla::GetDimensionSize(op, i);
int64_t diff =
rhs_xla_shape.dimensions(i) - lhs_xla_shape.dimensions(i);
op = xla::PadInDim(
op, xla::Zero(ctx->builder(), lhs_xla_shape.element_type()),
i, 0, diff);
lhs_tensor_shape->set_dim(i, rhs_xla_shape.dimensions(i));
// Propagate dynamic dimension.
op = xla::SetDimensionSize(op, size, i);
}
}
}
return op;
};
lhs_handle = reconcile_tensor_mismatched_dims(lhs_handle, lhs_xla_shape,
rhs_xla_shape, &lhs_shape);
rhs_handle = reconcile_tensor_mismatched_dims(rhs_handle, rhs_xla_shape,
lhs_xla_shape, &rhs_shape);
}
// By TensorFlow conventions the inputs may not have the same
// shapes, in which case they will be automatically broadcast if
// possible before mapping. Use the standard TensorFlow helper to
// compute valid broadcast shapes, but rely below on XLA to
// automatically perform the broadcast assuming its valid shapes are
// a superset of TensorFlow's valid shapes.
BCast bcast(BCast::FromShape(lhs_shape), BCast::FromShape(rhs_shape),
/*fewer_dims_optimization=*/false);
if (!bcast.IsValid()) {
ctx->SetStatus(errors::InvalidArgument("Incompatible shapes: ",
lhs_shape.DebugString(), " vs. ",
rhs_shape.DebugString()));
return;
}
// If the ranks of the inputs don't match, TensorFlow automatically
// reshapes the smaller by padding with dimensions of size 1 as a
// prefix. In other words to pad a 5-vector to a 3-dimensional
// tensor it is reshaped to have shape [1,1,5]. XLA's automatic
// broadcast code is able to broadcast from lower to higher rank,
// but doesn't assume you want to pad as a prefix of the dimensions,
// and instead needs to be told which dimensions of the higher rank
// tensor to match to the lower rank tensor. In this example it
// would be dimensions [2]. If we were matching a matrix against a
// 4-D tensor the dimensions to match would be [2,3],
// etc. extend_dimension encodes the general case.
std::vector extend_dimension;
int max_rank = std::max(lhs_shape.dims(), rhs_shape.dims());
int min_rank = std::min(lhs_shape.dims(), rhs_shape.dims());
if (min_rank != max_rank) {
for (int i = 0; i < min_rank; ++i) {
// Match the lower rank tensor along the larger-numbered
// dimensions of the higher rank tensor.
extend_dimension.push_back(max_rank - min_rank + i);
}
}
// Call virtual method to emit the computation.
xla::XlaOp output =
Computation(ctx, lhs_handle, lhs_shape.dim_sizes(), rhs_handle,
rhs_shape.dim_sizes(), bcast, extend_dimension);
// The TensorFlow helper computed the post-broadcast shape in
// output_shape: we rely on subclassed Computations to implement the
// same broadcast semantics.
ctx->SetOutput(0, output);
}
/* static */ std::pair<xla::XlaOp, xla::XlaOp> XlaBinaryOp::Broadcast(
xla::XlaOp lhs, xla::XlaOp rhs, const BCast& broadcast_helper) {
auto lhs_output = BroadcastTo(lhs, broadcast_helper.output_shape());
if (!lhs_output.ok()) {
xla::XlaOp error = lhs.builder()->ReportError(lhs_output.status());
return {error, error};
}
auto rhs_output = BroadcastTo(rhs, broadcast_helper.output_shape());
if (!rhs_output.ok()) {
xla::XlaOp error = rhs.builder()->ReportError(rhs_output.status());
return {error, error};
}
return {lhs_output.ValueOrDie(), rhs_output.ValueOrDie()};
}
} // namespace tensorflow | c++ | 25 | 0.610227 | 80 | 46.030675 | 163 | starcoderdata |
from flask_wtf import FlaskForm
from wtforms import PasswordField, StringField
class LoginForm(FlaskForm):
username = StringField("Käyttäjätunnus")
password = PasswordField("
class Meta:
csrf = False | python | 8 | 0.721519 | 46 | 25.444444 | 9 | starcoderdata |
'use strict';// eslint-disable-line
require('coffee-script').register();
const Linguist = require('atom-linguist');
const fs = require('fs');
const path = require('path');
const EventEmitter = require('events');
const yaml = require('js-yaml');
/**
* Project Watcher
* @class
*/
class ProjectWatcher extends EventEmitter {
constructor(projectPath) {
super();
if (typeof projectPath !== 'string') {
throw 'path should be a string';
}
if (!path.isAbsolute(projectPath)) {
throw 'path is not absolute';
}
this.projectPath = projectPath;
this._watchBuffer = [];
this._lastDate = 0;
    //count file change events as coding time; maximum value credited per event, in ms.
this.maxCodingDuration = 10 * 60 * 1000;
    //gather watch events that occur within the specified duration of each other, in ms.
this.lumpThreshold = 500;
    //path patterns to ignore
this.ignores = [];
//hidden path
this.ignores.push(/(^|\/)\.[^\/\.]/);
//vendor files
this.ignores.push(new RegExp(yaml.safeLoad(fs.readFileSync(`${__dirname}/../data/vendor.yml`, {json: true})).join('|')));
this.start();
}
/**
* start watching
*/
start() {
this._watcher = fs.watch(this.projectPath, { persistent: true, recursive: true }, this._onFileEvent.bind(this));
}
/**
* stop watching
*/
stop() {
if (this._watcher) {
this._watcher.close();
}
}
/**
* on file watch event arised
* @param event
* @param filename
*/
_onFileEvent(event, filename) {
//if there is no filename provided, skip.
if (typeof filename !== 'string') {
return;
}
//if the filename matches ignore, skip.
for (let ignore of this.ignores) {
if (filename.match(ignore) !== null) {
return;
}
}
//detect language of the file
let lang = null;
try {
lang = Linguist.detect(path.resolve(this.projectPath, filename));
} catch (e) {
return e;
}
this.registerCoding(filename, lang);
}
/**
* register coding
* @param filename
* @param lang
*/
registerCoding(filename, lang) {
this._watchBuffer.push({
path: filename,
lang: lang,
date: Date.now()
});
//lumping events
if (this._lumpTimeout) {
clearTimeout(this._lumpTimeout);
}
this._lumpTimeout = setTimeout(() => {
let now = Date.now();
this.emit('codingHour', {
//duration is 0~1000 in ms.
duration: Math.min(Math.max(now - this._lastDate, 0), this.maxCodingDuration),
//assume first one file is the user coded, rest are auto-generated/vendored files.
lang: this._watchBuffer[0].lang,
files: this._watchBuffer
});
this._watchBuffer = [];
this._lastDate = now;
this._lumpTimeout = null;
}, this.lumpThreshold);
}
}
module.exports = ProjectWatcher; | javascript | 15 | 0.600276 | 125 | 22.942149 | 121 | starcoderdata |
from flask_cors import CORS
from flask import Flask, jsonify
from logging_config import setup_logging
import logging as logger
from sentry_init import sentry_init
from views.convert import convert_blueprint
setup_logging()
sentry_init()
app = Flask(__name__)
app.config["JSON_AS_ASCII"] = False
# 10M
app.config["MAX_CONTENT_LENGTH"] = 1024 * 1024 * 10
# support document type
app.config["UPLOAD_EXTENSIONS"] = [
".docx",
".doc",
".xlsx",
".xls",
".ppt",
".pptx",
".md",
]
app.register_blueprint(convert_blueprint)
CORS(app)
# Return validation errors as JSON
@app.errorhandler(422)
def handle_error(err):
headers = err.data.get("headers", None)
messages = err.data.get("messages", ["Invalid request."])
if headers:
return jsonify({"errors": messages}), err.code, headers
else:
return jsonify({"errors": messages}), err.code
@app.errorhandler(400)
@app.errorhandler(404)
def handle_error(err):
if isinstance(err.description, str):
return err.description, err.code
return jsonify(error=err.description), err.code
@app.route("/", methods=["GET"])
def index():
return jsonify("ok"), 200
if __name__ == "__main__":
app.run(host="0.0.0.0") | python | 14 | 0.662338 | 63 | 20.614035 | 57 | starcoderdata |
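# --- Hedged smoke test (an assumption, not part of the original module) -----
# Quick check of the routes above using Flask's built-in test client; it only
# needs the `app` object defined in this file and its blueprint imports.
def _smoke_test():
    with app.test_client() as client:
        resp = client.get("/")
        print(resp.status_code, resp.get_json())   # expected: 200 'ok'
        missing = client.get("/does-not-exist")
        print(missing.status_code)                  # expected: 404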
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using JetBrains.Annotations;
namespace WebsitePoller.Entities
{
public abstract class SettingsBase
{
public string TimeZone { get; set; }
public int[] PostalCodes { get; set; }
public string[] Cities { get; set; }
public decimal MaxEigenmittel { get; set; }
public decimal MaxMonatlicheKosten { get; set; }
public int MinNumberOfRooms { get; set; }
public PostalAddress PostalAddress { get; set; }
public int PollingIntervallInSeconds { get; set; }
public Uri Url { get; set; }
protected SettingsBase()
{
}
protected static IEqualityComparer SettingsBaseComparer => new SettingsBaseEqualityComparer();
#region ISerializable
protected SettingsBase([NotNull]SerializationInfo info, StreamingContext context)
{
            TimeZone = (string) info.GetValue("timezone", typeof(string));
            MaxEigenmittel = (decimal) info.GetValue("maxEigenmittel", typeof(decimal));
            MaxMonatlicheKosten = (decimal) info.GetValue("maxMonatlicheKosten", typeof(decimal));
            MinNumberOfRooms = (int) info.GetValue("minNumberOfRooms", typeof(int));
            PostalAddress = (PostalAddress) info.GetValue("postalAddress", typeof(PostalAddress));
            PollingIntervallInSeconds = (int) info.GetValue("pollingIntervallInSeconds", typeof(int));
            PostalCodes = (int[]) info.GetValue("postalCodes", typeof(int[]));
            Cities = (string[]) info.GetValue("cities", typeof(string[]));
            Url = (Uri) info.GetValue("url", typeof(Uri));
}
protected void GetObjectDataBase([NotNull]SerializationInfo info, StreamingContext context)
{
info.AddValue("timezone", TimeZone);
info.AddValue("postalCodes", PostalCodes);
info.AddValue("cities", Cities);
info.AddValue("maxEigenmittel", MaxEigenmittel);
info.AddValue("maxMonatlicheKosten", MaxMonatlicheKosten);
info.AddValue("minNumberOfRooms", MinNumberOfRooms);
info.AddValue("postalAddress", PostalAddress);
info.AddValue("pollingIntervallInSeconds", PollingIntervallInSeconds);
info.AddValue("url", Url);
}
#endregion
}
} | c# | 15 | 0.647242 | 116 | 40.648148 | 54 | starcoderdata |
package andioopp.model.domain.entity;
import andioopp.common.math.dimension.Dimension;
import andioopp.common.math.rectangle.Rectangle;
import andioopp.common.math.rectangle.ImmutableRectangle;
import andioopp.common.math.vector.Vector3f;
import andioopp.model.domain.money.Money;
import andioopp.model.util.ModelCoordinate;
/**
* A dropped coin that exists in the game world.
*/
public class DroppedCoinEntity {
private final Rectangle rectangle;
private final Money value;
private final Dimension dimension = new Dimension(new ModelCoordinate(0.3f, 0.4f));
public DroppedCoinEntity(Vector3f position, Money value) {
this.rectangle = new ImmutableRectangle(position, dimension);
this.value = value;
}
public Money getValue() {
return value;
}
public ModelCoordinate getPosition() {
return new ModelCoordinate(rectangle.getPosition());
}
public Dimension getSize() {
return rectangle.getSize();
}
} | java | 8 | 0.734115 | 87 | 27.416667 | 36 | starcoderdata |
using System;
using System.Linq;
using System.Threading.Tasks;
using DSharpPlus;
using DSharpPlus.Entities;
using DSharpPlus.Exceptions;
using DSharpPlus.Interactivity;
using DSharpPlus.Interactivity.Extensions;
using DSharpPlus.SlashCommands;
using DSharpPlus.SlashCommands.Attributes;
using Lilia.Database;
using Lilia.Services;
namespace Lilia.Modules;
public class ModerationModule : ApplicationCommandModule
{
private LiliaClient _client;
private LiliaDbContext _dbCtx;
public ModerationModule(LiliaClient client)
{
this._client = client;
this._dbCtx = client.Database.GetContext();
}
[SlashCommand("ban", "Ban members, obviously")]
[SlashRequirePermissions(Permissions.BanMembers)]
public async Task BanMembersCommand(InteractionContext ctx,
[Option("reason", "Reason to ban")] string reason = "")
{
await ctx.DeferAsync();
DiscordMessage msg = await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent($"{Formatter.Bold("Mention")} all the people you want to ban"));
InteractivityExtension interactivity = ctx.Client.GetInteractivity();
var res = await interactivity.WaitForMessageAsync(x => x.MentionedUsers.Any());
if (res.TimedOut)
{
await ctx.FollowUpAsync(new DiscordFollowupMessageBuilder()
.WithContent("Time exceeded"));
return;
}
await ctx.FollowUpAsync(new DiscordFollowupMessageBuilder()
.WithContent("Banning mischievous people"));
DiscordFollowupMessageBuilder builder = new DiscordFollowupMessageBuilder();
foreach (DiscordUser user in res.Result.MentionedUsers)
{
DiscordMember member = (DiscordMember) user;
if (member == ctx.Member)
{
builder.WithContent("Beaned you");
}
else
{
reason = string.IsNullOrWhiteSpace(reason)
? $"Banned by {ctx.Member.DisplayName}#{ctx.Member.Discriminator}"
: reason;
await ctx.Guild.BanMemberAsync(member, 0, reason);
builder.WithContent($"Banned {member.DisplayName}#{member.Discriminator}");
}
}
await ctx.FollowUpAsync(builder);
}
[SlashCommand("kick", "Kick members, obviously")]
[SlashRequirePermissions(Permissions.KickMembers)]
public async Task KickMembersCommand(InteractionContext ctx,
[Option("reason", "Reason to kick")] string reason = "")
{
await ctx.DeferAsync();
DiscordMessage msg = await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent($"{Formatter.Bold("Mention")} all the people you want to kick"));
InteractivityExtension interactivity = ctx.Client.GetInteractivity();
var res = await interactivity.WaitForMessageAsync(x => x.MentionedUsers.Any());
if (res.TimedOut)
{
await ctx.FollowUpAsync(new DiscordFollowupMessageBuilder()
.WithContent("Time exceeded"));
return;
}
await ctx.FollowUpAsync(new DiscordFollowupMessageBuilder()
.WithContent("Kicking mischievous people"));
DiscordFollowupMessageBuilder builder = new DiscordFollowupMessageBuilder();
foreach (DiscordUser user in res.Result.MentionedUsers)
{
DiscordMember member = (DiscordMember) user;
if (member == ctx.Member)
{
builder.WithContent("Imagine kicking yourself, smh");
}
else
{
reason = string.IsNullOrWhiteSpace(reason)
? $"Kicked by {ctx.Member.DisplayName}#{ctx.Member.Discriminator}"
: reason;
await member.RemoveAsync(reason);
builder.WithContent($"Kicked {member.DisplayName}#{member.Discriminator}");
}
}
await ctx.FollowUpAsync(builder);
}
[SlashCommand("sendpsa", "Send PSA to a channel without the member knowing the sender")]
[SlashRequirePermissions(Permissions.ManageGuild)]
public async Task SendPsaCommand(InteractionContext ctx,
[Option("message_id", "Message ID to copy, same channel as command")] string msgId,
[ChannelTypes(ChannelType.Text, ChannelType.News, ChannelType.Store, ChannelType.NewsThread, ChannelType.PublicThread)]
[Option("channel", "Channel to send")] DiscordChannel channel)
{
try
{
await ctx.DeferAsync();
DiscordMessage msg = await ctx.Channel.GetMessageAsync(Convert.ToUInt64(msgId));
await channel.SendMessageAsync(msg.Content);
await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent("Sent to the destination channel"));
}
catch (NotFoundException)
{
await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent("Message with specified ID was not found in this channel"));
}
catch (FormatException)
{
await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent("Invalid message ID"));
}
}
[SlashCommand("editpsa", "Edit an existing PSA")]
[SlashRequirePermissions(Permissions.ManageGuild)]
public async Task EditPsaCommand(InteractionContext ctx,
[Option("old_message_id", "Old message ID")] string msgIdOld,
[Option("new_message_id", "New message ID, same channel as command")] string msgIdNew,
[ChannelTypes(ChannelType.Text, ChannelType.News, ChannelType.Store, ChannelType.NewsThread, ChannelType.PublicThread)]
[Option("channel", "Previously sent PSA channel")] DiscordChannel channel)
{
try
{
await ctx.DeferAsync();
DiscordMessage oldMsg = await channel.GetMessageAsync(Convert.ToUInt64(msgIdOld));
if (oldMsg.Author != ctx.Client.CurrentUser)
{
await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent("I was not the one to write the PSA"));
return;
}
DiscordMessage newMsg = await ctx.Channel.GetMessageAsync(Convert.ToUInt64(msgIdNew));
await oldMsg.ModifyAsync(newMsg.Content);
await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent("Edited the message"));
}
catch (NotFoundException)
{
await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent("Either the old message was not found in the provided channel or the new message was not found in this channel"));
}
catch (FormatException)
{
await ctx.EditResponseAsync(new DiscordWebhookBuilder()
.WithContent("Invalid message ID(s)"));
}
}
} | c# | 23 | 0.623402 | 143 | 35.701031 | 194 | starcoderdata |
struct LoopTransformation {
LoopTransformType type;
LoopTransformSide side;
size_t count;
LoopTransformation(LoopTransformType type, LoopTransformSide side,
size_t count)
: type(type), side(side), count(count) {}
};
template <typename T>
Integer getReturnValue(const Call<T> &call,
const AnalysisResultsMap &analysisResults) {
// Non int return values should not exist so this is safe
return call.returnState.variables
.find(analysisResults.at(call.function).returnInstruction)
->second;
}
template <typename T>
MonoPair<Integer> getReturnValues(const Call<T> &call1, const Call<T> &call2,
const AnalysisResultsMap &analysisResults) {
return {getReturnValue(call1, analysisResults),
getReturnValue(call2, analysisResults)};
} | c | 14 | 0.667816 | 78 | 36.869565 | 23 | inline |
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
public class Main {
public static void main(String[] args){
try(Scanner sc = new Scanner(System.in)) {
String s = sc.next();
int q = sc.nextInt();
List<String> sList = new LinkedList<>();
List<String> revList = new LinkedList<>();
boolean isRev = false;
sList.add(s);
revList.add(new StringBuilder(s).reverse().toString());
for(int i = 0 ; i < q ; i++ ) {
int op = sc.nextInt();
if ( op == 1 ) {
isRev = !isRev;
}
else {
int f = sc.nextInt();
String c = sc.next();
if ( isRev) {
if ( f == 1 ) {
//add rev head
sList.add(c);
revList.add(0,c);
}
else {
sList.add(0,c);
revList.add(c);
}
}
else {
if ( f == 1 ) {
//add head
sList.add(0, c);
revList.add(c);
}
else {
sList.add(c);
revList.add(0,c);
}
}
}
}
StringBuilder sb = new StringBuilder();
if (isRev) {
for(String ss : revList) {
sb.append(ss);
}
}
else {
for(String ss : sList) {
sb.append(ss);
}
}
System.out.println(sb.toString());
}
}
} | java | 18 | 0.482595 | 58 | 16.816901 | 71 | codenet |
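# --- Hedged algorithm sketch (an addition, not part of the original Java) ---
# The Java solution above avoids O(n) reversals by keeping two mirrored lists
# and an isRev flag. The same lazy-reversal idea in Python with one deque:
from collections import deque

def simulate(s, queries):
    d, rev = deque(s), False
    for q in queries:
        if q[0] == 1:                    # type 1: toggle the reversal flag
            rev = not rev
        else:                            # type 2: (2, f, c) -> add char c at end f
            _, f, c = q
            at_front = (f == 1) != rev   # which physical end is the logical front
            d.appendleft(c) if at_front else d.append(c)
    out = ''.join(d)
    return out[::-1] if rev else out

print(simulate("a", [(2, 1, "p"), (1,), (2, 2, "c"), (1,)]))   # -> "cpa"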
void DeepSearch(int x,int y,char color){
status[stage].x=x;
status[stage].y=y;
setDol(x, y, color);
stage++;
// printf("deep \n");
// printBoard();
} | c | 7 | 0.561404 | 40 | 20.5 | 8 | inline |
// Lessons - C++/Lesson 5 - Digital clock project/bme280.cpp
#include "bme280.h"
enum RegisterAddresses
{
BME280_ADDR = 0x76,
CALIB_00_25_ADDR = 0x88,
CALIB_26_41_ADDR = 0xE1,
CTRL_HUM_ADDR = 0xF2,
CTRL_MEAS_ADDR = 0xF4,
DATA_REG_ADDR = 0xF7
};
enum BitFields
{
CTRL_HUM_OSRS_H_OVR1 = 0x01,
CTRL_HUM_OSRS_H_OVR2 = 0x02,
CTRL_HUM_OSRS_H_OVR4 = 0x03,
CTRL_HUM_OSRS_H_OVR8 = 0x04,
CTRL_HUM_OSRS_H_OVR16 = 0x05,
CTRL_MEAS_MODE_SLEEP = 0x00,
CTRL_MEAS_MODE_FORCED1 = 0x01,
CTRL_MEAS_MODE_FORCED2 = 0x02,
CTRL_MEAS_MODE_NORMAL = 0x03,
CTRL_MEAS_OSRS_T_OVR1 = 1<<5,
CTRL_MEAS_OSRS_T_OVR2 = 2<<5,
CTRL_MEAS_OSRS_T_OVR4 = 3<<5,
CTRL_MEAS_OSRS_T_OVR8 = 4<<5,
CTRL_MEAS_OSRS_T_OVR16 = 5<<5
};
typedef struct
{
uint16_t dig_T1;
int16_t dig_T2;
int16_t dig_T3;
uint8_t dig_H1;
int16_t dig_H2;
uint8_t dig_H3;
int16_t dig_H4;
int16_t dig_H5;
int8_t dig_H6;
}bme280CompParam_t;
const uint8_t I2C_HAL_TIMEOUT = 2;
const uint8_t ADC_REGISTER_SIZE = 8;
const uint8_t CALIB_00_25_SIZE = 26;
const uint8_t CALIB_26_41_SIZE = 16;
static bme280CompParam_t bme280;
static uint8_t rawAdcValue[ADC_REGISTER_SIZE];
static uint8_t bme280Calib00_25[CALIB_00_25_SIZE];
static uint8_t bme280Calib26_41[CALIB_26_41_SIZE];
static int32_t t_fine;
static void BME280_StoreCompensationParameters(uint8_t* calibReg1,
uint8_t* calibReg2,
bme280CompParam_t* bme)
{
bme->dig_T1 = (calibReg1[0] | (calibReg1[1] << 8));
bme->dig_T2 = (calibReg1[2] | (calibReg1[3] << 8));
bme->dig_T3 = (calibReg1[4] | (calibReg1[5] << 8));
bme->dig_H1 = calibReg1[25];
bme->dig_H2 = (calibReg2[0] | (calibReg2[1] << 8));
bme->dig_H3 = calibReg2[2];
bme->dig_H4 = ((calibReg2[3] << 4) | (calibReg2[4] & 0x0F));
bme->dig_H5 = (((calibReg2[4] & ~(0x0F)) >> 4) | (calibReg2[5] << 4));
bme->dig_H6 = calibReg2[6];
}
BME280::BME280(I2C_TypeDef* I2Cx,pinStruct_t& i2cPin1,pinStruct_t& i2cPin2)
{
GPIO_InitTypeDef i2cGPIO1InitStruct = {0};
GPIO_InitTypeDef i2cGPIO2InitStruct = {0};
//GPIO configuration (SDA and SCL in any order)
i2cGPIO1InitStruct.Pin = i2cPin1.selectedPin;
i2cGPIO1InitStruct.Mode = GPIO_MODE_AF_OD;
i2cGPIO1InitStruct.Pull = GPIO_PULLUP;
i2cGPIO1InitStruct.Alternate = 0x04; //alternate function for I2C
HAL_GPIO_Init(i2cPin1.port,&i2cGPIO1InitStruct);
i2cGPIO2InitStruct.Pin = i2cPin2.selectedPin;
i2cGPIO2InitStruct.Mode = GPIO_MODE_AF_OD;
i2cGPIO2InitStruct.Pull = GPIO_PULLUP;
i2cGPIO2InitStruct.Alternate = 0x04; //alternate function for I2C
HAL_GPIO_Init(i2cPin2.port,&i2cGPIO2InitStruct);
//I2C configuration
hi2c.Instance = I2Cx;
hi2c.Init.ClockSpeed = 100000;
hi2c.Init.AddressingMode = I2C_ADDRESSINGMODE_7BIT;
HAL_I2C_Init(&hi2c);
//Read sensor's calibration data
HAL_I2C_Mem_Read(&hi2c,
BME280_ADDR<<1,
CALIB_00_25_ADDR,
I2C_MEMADD_SIZE_8BIT,
bme280Calib00_25,
CALIB_00_25_SIZE,
I2C_HAL_TIMEOUT);
//Read the first 7 calibration data of calib26_41 register of bme280
HAL_I2C_Mem_Read(&hi2c,
BME280_ADDR<<1,
CALIB_26_41_ADDR,
I2C_MEMADD_SIZE_8BIT,
bme280Calib26_41,
7,
I2C_HAL_TIMEOUT);
BME280_StoreCompensationParameters(bme280Calib00_25,bme280Calib26_41,&bme280);
}
bme280Data_t BME280::GetData(void)
{
bme280Data_t bme280Data = {0};
uint8_t sensorConfig = CTRL_HUM_OSRS_H_OVR16;
HAL_I2C_Mem_Write(&hi2c,
BME280_ADDR<<1,
CTRL_HUM_ADDR,
I2C_MEMADD_SIZE_8BIT,
&sensorConfig,
1,
I2C_HAL_TIMEOUT);
sensorConfig = CTRL_MEAS_OSRS_T_OVR16|CTRL_MEAS_MODE_FORCED1;
HAL_I2C_Mem_Write(&hi2c,
BME280_ADDR<<1,
CTRL_MEAS_ADDR,
I2C_MEMADD_SIZE_8BIT,
&sensorConfig,
1,
I2C_HAL_TIMEOUT);
HAL_I2C_Mem_Read(&hi2c,
BME280_ADDR<<1,
DATA_REG_ADDR,
I2C_MEMADD_SIZE_8BIT,
rawAdcValue,
ADC_REGISTER_SIZE,
I2C_HAL_TIMEOUT);
//Temperature calculation
int32_t adc_T, var1, var2, T;
adc_T = ((rawAdcValue[3] << 12) | (rawAdcValue[4] << 4) | (rawAdcValue[5] >> 4));
var1 = ((((adc_T>>3) - ((int32_t)bme280.dig_T1<<1))) * ((int32_t)bme280.dig_T2)) >> 11;
var2 = (((((adc_T>>4) - ((int32_t)bme280.dig_T1)) *
((adc_T>>4) - ((int32_t)bme280.dig_T1))) >> 12) * ((int32_t)bme280.dig_T3)) >> 14;
t_fine = var1 + var2;
T = (t_fine * 5 + 128) >> 8;
bme280Data.temperature = T / 100;
//Humidity calculation
int32_t adc_H, v_x1_u32r;
adc_H = ((rawAdcValue[6] << 8) | rawAdcValue[7]);
v_x1_u32r = (t_fine - ((uint32_t)76800));
v_x1_u32r = (((((adc_H << 14) - (((int32_t)bme280.dig_H4) << 20) -
(((int32_t)bme280.dig_H5) * v_x1_u32r)) +
((int32_t)16384)) >> 15) *
(((((((v_x1_u32r * ((int32_t)bme280.dig_H6)) >> 10) *
(((v_x1_u32r * ((int32_t)bme280.dig_H3)) >> 11) +
((int32_t)32768))) >> 10) + ((int32_t)2097152)) *
((int32_t)bme280.dig_H2) + 8192) >> 14));
v_x1_u32r = (v_x1_u32r - (((((v_x1_u32r >> 15) * (v_x1_u32r >> 15)) >> 7) * ((int32_t)bme280.dig_H1)) >> 4));
v_x1_u32r = (v_x1_u32r < 0) ? 0 : v_x1_u32r;
v_x1_u32r = (v_x1_u32r > 419430400) ? 419430400 : v_x1_u32r;
uint32_t rawHumidity = (uint32_t)(v_x1_u32r >> 12);
bme280Data.humidity = rawHumidity / 1024;
return bme280Data;
} | c++ | 27 | 0.598594 | 110 | 30.168539 | 178 | starcoderdata |
static void CommitFlexWeights( D3DDeviceWrapper *pDevice, const DynamicState_t &desiredState,
DynamicState_t ¤tState, bool bForce )
{
if ( IsX360() )
{
// not supporting for 360
return;
}
CommitVertexShaderConstantRange( pDevice, desiredState, currentState, bForce,
VERTEX_SHADER_FLEX_WEIGHTS, VERTEX_SHADER_MAX_FLEX_WEIGHT_COUNT );
} | c++ | 7 | 0.743094 | 94 | 29.25 | 12 | inline |
<?php
namespace SendyPHP\Exception;
/**
* Invalid custom field name Exception
*
* is usually thrown if name of custom field match with some of reserved keywords
*
* @package SendyPHP
*/
class ForbiddenCustomFieldNameException extends DomainException
{
/**
* Invalid URL Exception
*
* @param string $customFieldName
* @param int $code
* @param \Exception $previous
*/
public function __construct($customFieldName, $code = 0, \Exception $previous = NULL)
{
parent::__construct($this->_buildMessage($customFieldName), $code, $previous);
}
/**
* Builds Exception message text
*
* @param mixed $customFieldName
* @return string
*/
protected function _buildMessage($customFieldName)
{
$message = 'Forbidden custom field name detected ';
if(is_string($customFieldName))
$message.= ' - [.'.$customFieldName.'.] - this field name is reserved.';
else
$message.= ' - '.gettype($customFieldName).' given ['.var_export($customFieldName,1).'], string expected.';
return $message;
}
} | php | 17 | 0.646914 | 119 | 30.179487 | 39 | starcoderdata |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import json
from os.path import split
from django.core.files.storage import default_storage
from django.db import connection, transaction
from django.shortcuts import redirect, render
from django.views import View
from common.mixins import AdminRequiredMixin
from .forms import ImportDataForm, UploadFileForm
from .models import TaskHistory
from .processing.errors import FileError, MultipleUuidError, NoRequiredColumnError, UnnamedColumnError
from .processing.processing_main import process_file
class ImportData(AdminRequiredMixin, View):
stop_error = False
errors = None
report_list = None
task = None
def post(self, request):
form_upload = UploadFileForm(request.POST, request.FILES)
form_import = ImportDataForm(request.POST)
if form_upload.is_valid():
try:
records_for_add, records_for_update, warnings, errors, report_dict = process_file(request.FILES['file'])
except (FileError, ValueError, UnnamedColumnError, MultipleUuidError, NoRequiredColumnError) as error:
stop_error = True
stop_error_msg = [error.description] + [error.message]
return render(request, 'imports/import_data_page.html', {
'form': {'form_upload': form_upload}, 'stop_error': stop_error, 'stop_error_msg': stop_error_msg})
except Exception:
stop_error = True
stop_error_msg = ['Unexpected error occurred.']
return render(request, 'imports/import_data_page.html', {
'form': {'form_upload': form_upload}, 'stop_error': stop_error, 'stop_error_msg': stop_error_msg})
task = form_upload.save(commit=False)
task.webuser_id = request.user.pk
task.save()
f = TaskHistory(
changed_at=task.uploaded_at, old_state='n', new_state='u', webuser_id=request.user.id, task_id=task.id,
errors=errors, warnings=warnings, report_dict=report_dict
)
f.save()
return render(
request, 'imports/import_data_page.html', {
'form': {'form_upload': form_upload, 'form_import': form_import}, 'errors': errors,
'report_dict': report_dict, 'task_id': task.pk, 'warnings': warnings
}
)
def get(self, request):
form_upload = UploadFileForm()
form_import = ImportDataForm()
return render(
request, 'imports/import_data_page.html', {
'form': {'form_upload': form_upload, 'form_import': form_import}
}
)
class ImportDataTask(AdminRequiredMixin, View):
def get(self, request, task_id):
task_id = int(task_id)
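        # If this task has already been imported (new_state 'i'), redirect back to the history page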
with connection.cursor() as cur:
cur.execute(
"""
SELECT true
FROM public.imports_taskhistory
WHERE public.imports_taskhistory.task_id = %s AND public.imports_taskhistory.new_state = 'i';
""", [task_id]
)
if cur.fetchone():
return redirect('/import_history')
else:
pass
cur.execute(
"""
SELECT public.imports_task.file FROM public.imports_task WHERE imports_task.id = %s;
""", [task_id]
)
pathname = default_storage.open(cur.fetchone()[0])
records_for_add, records_for_update, warnings, errors, report_dict = process_file(pathname)
with transaction.atomic():
with connection.cursor() as cursor:
cursor.execute(
'INSERT INTO features.changeset (webuser_id, changeset_type) VALUES (%s, %s) RETURNING id', (
request.user.pk, 'I'
)
)
changeset_id = cursor.fetchone()[0]
cursor.execute(
'UPDATE public.imports_task SET changeset_id = %s WHERE public.imports_task.id = %s', (
changeset_id,
task_id
)
)
if len(records_for_add) > 0:
try:
with transaction.atomic():
with connection.cursor() as cursor:
for record in records_for_add:
cursor.execute(
'SELECT core_utils.create_feature(%s, %s) ', (
changeset_id,
json.dumps(record)
)
)
except Exception:
raise
if len(records_for_update) > 0:
try:
with transaction.atomic():
with connection.cursor() as cursor:
for record in records_for_update:
cursor.execute(
"""
SELECT core_utils.update_feature(%s, %s, %s)
""", (
changeset_id,
record['feature_uuid'],
json.dumps(record)
)
)
except Exception:
raise
task_history = TaskHistory(
old_state='u', new_state='i', webuser_id=request.user.id, task_id=task_id, report_dict=report_dict
)
task_history.save()
return redirect('/import_history')
class ImportHistory(AdminRequiredMixin, View):
def get(self, request):
with transaction.atomic():
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT task_id, file, to_char(changed_at, 'YYYY-MM-DD HH24:MI:SS TZ'), new_state,
imports_task.webuser_id
FROM public.imports_task INNER JOIN public.imports_taskhistory
ON public.imports_task.id = public.imports_taskhistory.task_id
WHERE public.imports_task.webuser_id=%s
ORDER BY changed_at DESC;
""", [request.user.id]
)
task_history_states = cursor.fetchall()
history_list = []
for task_id, file_path, changed_at, new_state, _ in task_history_states:
if new_state == TaskHistory.STATE_UPLOADED:
file_name = split(file_path)[1]
history_list.append({
'task_id': task_id, 'updated_at': changed_at, 'file_name': file_name, 'imported_at': None,
'file_path': default_storage.url(file_path)
})
for task_id, _, changed_at, new_state, _ in task_history_states:
if new_state == TaskHistory.STATE_INSERTED:
for index, item in enumerate(history_list):
if item['task_id'] == task_id:
history_list[index]['imported_at'] = changed_at
return render(request, 'imports/import_history_page.html', {'history_list': history_list})
class TaskHistoryView(AdminRequiredMixin, View):
def get(self, request, task_id):
with transaction.atomic():
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT to_char(changed_at, 'YYYY-MM-DD HH24:MI:SS TZ'), new_state, errors, warnings,
report_dict
FROM public.imports_taskhistory
WHERE imports_taskhistory.webuser_id=%s AND imports_taskhistory.task_id=%s
ORDER BY public.imports_taskhistory.changed_at ASC;
""", [request.user.id, task_id]
)
task_history_list = cursor.fetchall()
task_state_list = []
for changed_at, new_state, errors, warnings, report_dict in task_history_list:
task_state_list.append({
'changed_at': changed_at, 'new_state': new_state, 'errors': errors, 'report_dict': report_dict,
'task_id': task_id, 'warnings': warnings
})
return render(request, 'imports/task_history_page.html', {'task_state_list': task_state_list}) | python | 21 | 0.523215 | 120 | 38.502304 | 217 | starcoderdata |
package main
import (
"crypto/tls"
"crypto/x509"
"flag"
"fmt"
"io/ioutil"
"log"
"time"
"github.com/salrashid123/scratchpad/go_cert_bound_sts/grpc/echo"
"golang.org/x/net/context"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials"
healthpb "google.golang.org/grpc/health/grpc_health_v1"
sts "google.golang.org/grpc/credentials/sts"
)
const ()
var (
conn *grpc.ClientConn
)
func main() {
address := flag.String("host", "localhost:50051", "host:port of gRPC server")
tlsCA := flag.String("tlsCA", "tls-ca.crt", "CACert for server")
tlsCert := flag.String("tlsCert", "client.crt", "TLS Client Certificate")
tlsKey := flag.String("tlsKey", "client.key", "TLS Client Key")
stsaddress := flag.String("stsaddress", "https://sts.domain.com:8081/token", "STS Server address")
stsaudience := flag.String("stsaudience", "grpcs://grpc.domain.com:50051", "the audience and resource value to send to STS server")
scope := flag.String("scope", "https://www.googleapis.com/auth/cloud-platform", "scope to send to STS server")
stsCred := flag.String("stsCred", "/tmp/cred.txt", "STS Credentials (as file)")
serverName := flag.String("servername", "grpc.domain.com", "SNI for the grpcEndpoint")
stsSNIServerName := flag.String("stsSNIServerName", "sts.domain.com", "SNI for the STS Server")
flag.Parse()
var err error
caCert, err := ioutil.ReadFile(*tlsCA)
if err != nil {
log.Fatalf("did not read tlsCA: %v", err)
}
caCertPool := x509.NewCertPool()
caCertPool.AppendCertsFromPEM(caCert)
clientCerts, err := tls.LoadX509KeyPair(
*tlsCert,
*tlsKey,
)
if err != nil {
log.Fatalf("did not load keypairs: %v", err)
}
tlsConfig := tls.Config{
ServerName: *serverName,
Certificates: []tls.Certificate{clientCerts},
RootCAs: caCertPool,
}
creds := credentials.NewTLS(&tlsConfig)
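	// TLS config intended for the STS endpoint (different SNI); the sts.Options TLSConfig field below is commented out, so for now it is only printed.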
stlsConfig := tls.Config{
ServerName: *stsSNIServerName,
Certificates: []tls.Certificate{clientCerts},
RootCAs: caCertPool,
}
fmt.Printf("%v", stlsConfig)
// https://github.com/grpc/grpc-go/issues/5099
stscreds, err := sts.NewCredentials(sts.Options{
TokenExchangeServiceURI: *stsaddress,
Resource: *stsaudience,
Audience: *stsaudience,
Scope: *scope,
SubjectTokenPath: *stsCred,
SubjectTokenType: "urn:ietf:params:oauth:token-type:access_token",
RequestedTokenType: "urn:ietf:params:oauth:token-type:jwt",
// TLSConfig: stlsConfig,
})
if err != nil {
log.Fatalf("unable to create TokenSource: %v", err)
}
conn, err = grpc.Dial(*address, grpc.WithTransportCredentials(creds), grpc.WithPerRPCCredentials(stscreds))
if err != nil {
log.Fatalf("did not connect: %v", err)
}
defer conn.Close()
c := echo.NewEchoServerClient(conn)
ctx := context.Background()
ctx, cancel := context.WithTimeout(ctx, 2*time.Second)
defer cancel()
resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{Service: "echo.EchoServer"})
if err != nil {
log.Fatalf("HealthCheck failed %v", err)
}
if resp.GetStatus() != healthpb.HealthCheckResponse_SERVING {
log.Fatalf("service not in serving state: ", resp.GetStatus().String())
}
log.Printf("RPC HealthChekStatus:%v", resp.GetStatus())
r, err := c.SayHello(ctx, &echo.EchoRequest{Name: "unary RPC msg "})
if err != nil {
log.Fatalf("could not greet: %v", err)
}
time.Sleep(1 * time.Second)
log.Printf("RPC Response: %s", r)
} | go | 13 | 0.682549 | 132 | 27.793388 | 121 | starcoderdata |
using System.Collections.Generic;
using UnityEngine;
[CreateAssetMenu(fileName = "SheetSetting", menuName = "3Q/Sheet Setting", order = 1)]
public class SheetSetting : ScriptableObject
{
    /*public string spreadSheetKey = "";
    public string startCell = "A1";
    public string endCell = "Z100";
    public List<string> SheetNames = new List<string>();*/
    public List<SheetConfig> GoogleSheets;
}
[System.Serializable]
public class SheetConfig
{
public string spreadSheetKey = "";
[HideInInspector] public bool isExpand = false;
    public List<SheetName> sheetNames = new List<SheetName>();
}
[System.Serializable]
public class SheetName
{
public string startCell = "A1";
public string endCell = "Z100";
public bool buildText;
public string name;
} | c# | 10 | 0.712102 | 86 | 25.2 | 30 | starcoderdata |
import { getAccessToken } from './sign-in';
export const APPS_REQUEST = 'APPS_REQUEST';
export const APPS_SUCCESS = 'APPS_SUCCESS';
export const APPS_ERROR = 'APPS_ERROR';
function appsRequesting() {
return { type: APPS_REQUEST };
}
function appsSuccess(payload) {
return { type: APPS_SUCCESS, payload };
}
function appsError() {
return { type: APPS_ERROR };
}
export function fetchApps() {
return async function (dispatch) {
dispatch(appsRequesting());
const response = await fetch('https://guarded-thicket-22918.herokuapp.com/apps', {
headers: {
'Authorization': getAccessToken(),
'Content-Type': 'application/json'
}
});
if (!response.ok) return dispatch(appsError());
const { apps } = await response.json();
return dispatch(appsSuccess(apps));
}
} | javascript | 16 | 0.62035 | 90 | 23.72973 | 37 | starcoderdata |
#### We use argparse for processing command line arguments, random for shuffling our data, sys for flushing output, and numpy for handling vectors of data.
# DyNet Implementation
import argparse
import random
import sys
import numpy as np
#### Typically, we would make many of these constants command line arguments and tune using the development set. For simplicity, I have fixed their values here to match Jiang, Liang and Zhang (CoLing 2018).
PAD = "__PAD__"
UNK = "__UNK__"
DIM_EMBEDDING = 100 # DIM_EMBEDDING - number of dimensions in our word embeddings.
LSTM_HIDDEN = 100 # LSTM_HIDDEN - number of dimensions in the hidden vectors for the LSTM. Based on NCRFpp (200 in the paper, but 100 per direction in code)
BATCH_SIZE = 10 # BATCH_SIZE - number of examples considered in each model update.
LEARNING_RATE = 0.015 # LEARNING_RATE - adjusts how rapidly model parameters change by rescaling the gradient vector.
LEARNING_DECAY_RATE = 0.05 # LEARNING_DECAY_RATE - part of a rescaling of the learning rate after each pass through the data.
EPOCHS = 100 # EPOCHS - number of passes through the data in training.
KEEP_PROB = 0.5 # KEEP_PROB - probability of keeping a value when applying dropout.
GLOVE = "../data/glove.6B.100d.txt" # GLOVE - location of glove vectors.
WEIGHT_DECAY = 1e-8 # WEIGHT_DECAY - part of a rescaling of weights when an update occurs.
#### Dynet library imports. The first allows us to configure DyNet from within code rather than on the command line: mem is the amount of system memory initially allocated (DyNet has its own memory management), autobatch toggles automatic parallelisation of computations, weight_decay rescales weights by (1 - decay) after every update, random_seed sets the seed for random number generation.
import dynet_config
dynet_config.set(mem=256, autobatch=0, weight_decay=WEIGHT_DECAY,random_seed=0)
# dynet_config.set_gpu() for when we want to run with GPUs
import dynet as dy
####
# Data reading
def read_data(filename):
#### We are expecting a minor variation on the raw Penn Treebank data, with one line per sentence, tokens separated by spaces, and the tag for each token placed next to its word (the | works as a separator as it does not appear as a token).
"""Example input:
Pierre|NNP Vinken|NNP ,|, 61|CD years|NNS old|JJ
"""
content = []
with open(filename) as data_src:
for line in data_src:
t_p = [w.split("|") for w in line.strip().split()]
tokens = [v[0] for v in t_p]
tags = [v[1] for v in t_p]
content.append((tokens, tags))
return content
def simplify_token(token):
chars = []
for char in token:
#### Reduce sparsity by replacing all digits with 0.
if char.isdigit():
chars.append("0")
else:
chars.append(char)
return ''.join(chars)
def main():
#### For the purpose of this example we only have arguments for locations of the data.
parser = argparse.ArgumentParser(description='POS tagger.')
parser.add_argument('training_data')
parser.add_argument('dev_data')
args = parser.parse_args()
train = read_data(args.training_data)
dev = read_data(args.dev_data)
#### These indices map from strings to integers, which we apply to the input for our model. UNK is added to our mapping so that there is a vector we can use when we encounter unknown words. The special PAD symbol is used in PyTorch and Tensorflow as part of shaping the data in a batch to be a consistent size. It is not needed for DyNet, but kept for consistency.
# Make indices
id_to_token = [PAD, UNK]
token_to_id = {PAD: 0, UNK: 1}
id_to_tag = [PAD]
tag_to_id = {PAD: 0}
#### The '+ dev' may seem like an error, but is done here for convenience. It means in the next section we will retain the GloVe embeddings that appear in dev but not train. They won't be updated during training, so it does not mean we are getting information we shouldn't. In practise I would simply keep all the GloVe embeddings to avoid any potential incorrect use of the evaluation data.
for tokens, tags in train + dev:
for token in tokens:
token = simplify_token(token)
if token not in token_to_id:
token_to_id[token] = len(token_to_id)
id_to_token.append(token)
for tag in tags:
if tag not in tag_to_id:
tag_to_id[tag] = len(tag_to_id)
id_to_tag.append(tag)
NWORDS = len(token_to_id)
NTAGS = len(tag_to_id)
# Load pre-trained GloVe vectors
#### I am assuming these are 100-dimensional GloVe embeddings in their standard format.
pretrained = {}
for line in open(GLOVE):
parts = line.strip().split()
word = parts[0]
vector = [float(v) for v in parts[1:]]
pretrained[word] = vector
#### We need the word vectors as a list to initialise the embeddings. Each entry in the list corresponds to the token with that index.
pretrained_list = []
scale = np.sqrt(3.0 / DIM_EMBEDDING)
for word in id_to_token:
# apply lower() because all GloVe vectors are for lowercase words
if word.lower() in pretrained:
pretrained_list.append(np.array(pretrained[word.lower()]))
else:
#### For words that do not appear in GloVe we generate a random vector (note, the choice of scale here is important and we follow Jiang, Liang and Zhang (CoLing 2018).
random_vector = np.random.uniform(-scale, scale, [DIM_EMBEDDING])
pretrained_list.append(random_vector)
#### The most significant difference between the frameworks is how the model parameters and their execution is defined. In DyNet we define parameters here and then define computation as needed. In PyTorch we use a class with the parameters defined in the constructor and the computation defined in the forward() method. In Tensorflow we define both parameters and computation here.
# Model creation
####
model = dy.ParameterCollection()
# Create word embeddings and initialise
#### Lookup parameters are a matrix that supports efficient sparse lookup.
pEmbedding = model.add_lookup_parameters((NWORDS, DIM_EMBEDDING))
pEmbedding.init_from_array(np.array(pretrained_list))
# Create LSTM parameters
#### Objects that create LSTM cells and the necessary parameters.
stdv = 1.0 / np.sqrt(LSTM_HIDDEN) # Needed to match PyTorch
f_lstm = dy.VanillaLSTMBuilder(1, DIM_EMBEDDING, LSTM_HIDDEN, model,
forget_bias=(np.random.random_sample() - 0.5) * 2 * stdv)
b_lstm = dy.VanillaLSTMBuilder(1, DIM_EMBEDDING, LSTM_HIDDEN, model,
forget_bias=(np.random.random_sample() - 0.5) * 2 * stdv)
# Create output layer
pOutput = model.add_parameters((NTAGS, 2 * LSTM_HIDDEN))
# Set recurrent dropout values (not used in this case)
f_lstm.set_dropouts(0.0, 0.0)
b_lstm.set_dropouts(0.0, 0.0)
# Initialise LSTM parameters
#### To match PyTorch, we initialise the parameters with an unconventional approach.
f_lstm.get_parameters()[0][0].set_value(
np.random.uniform(-stdv, stdv, [4 * LSTM_HIDDEN, DIM_EMBEDDING]))
f_lstm.get_parameters()[0][1].set_value(
np.random.uniform(-stdv, stdv, [4 * LSTM_HIDDEN, LSTM_HIDDEN]))
f_lstm.get_parameters()[0][2].set_value(
np.random.uniform(-stdv, stdv, [4 * LSTM_HIDDEN]))
b_lstm.get_parameters()[0][0].set_value(
np.random.uniform(-stdv, stdv, [4 * LSTM_HIDDEN, DIM_EMBEDDING]))
b_lstm.get_parameters()[0][1].set_value(
np.random.uniform(-stdv, stdv, [4 * LSTM_HIDDEN, LSTM_HIDDEN]))
b_lstm.get_parameters()[0][2].set_value(
np.random.uniform(-stdv, stdv, [4 * LSTM_HIDDEN]))
#### The trainer object is used to update the model.
# Create the trainer
trainer = dy.SimpleSGDTrainer(model, learning_rate=LEARNING_RATE)
#### DyNet clips gradients by default, which we disable here (this can have a big impact on performance).
trainer.set_clip_threshold(-1)
#### To make the code match across the three versions, we group together some framework specific values needed when doing a pass over the data.
expressions = (pEmbedding, pOutput, f_lstm, b_lstm, trainer)
#### Main training loop, in which we shuffle the data, set the learning rate, do one complete pass over the training data, then evaluate on the development data.
for epoch in range(EPOCHS):
random.shuffle(train)
####
# Update learning rate
trainer.learning_rate = LEARNING_RATE / (1+ LEARNING_DECAY_RATE * epoch)
#### Training pass.
loss, tacc = do_pass(train, token_to_id, tag_to_id, expressions, True)
#### Dev pass.
_, dacc = do_pass(dev, token_to_id, tag_to_id, expressions, False)
print("{} loss {} t-acc {} d-acc {}".format(epoch, loss, tacc, dacc))
#### The syntax varies, but in all three cases either saving or loading the parameters of a model must be done after the model is defined.
# Save model
model.save("tagger.dy.model")
# Load model
model.populate("tagger.dy.model")
# Evaluation pass.
_, test_acc = do_pass(dev, token_to_id, tag_to_id, expressions, False)
print("Test Accuracy: {:.3f}".format(test_acc))
#### Inference (the same function for train and test).
def do_pass(data, token_to_id, tag_to_id, expressions, train):
pEmbedding, pOutput, f_lstm, b_lstm, trainer = expressions
# Loop over batches
loss = 0
match = 0
total = 0
for start in range(0, len(data), BATCH_SIZE):
#### Form the batch and order it based on length (important for efficient processing in PyTorch).
batch = data[start : start + BATCH_SIZE]
batch.sort(key = lambda x: -len(x[0]))
#### Log partial results so we can conveniently check progress.
if start % 4000 == 0 and start > 0:
print(loss, match / total)
sys.stdout.flush()
#### Start a new computation graph for this batch.
# Process batch
dy.renew_cg()
#### For each example, we will construct an expression that gives the loss.
loss_expressions = []
predicted = []
#### Convert tokens and tags from strings to numbers using the indices.
for n, (tokens, tags) in enumerate(batch):
token_ids = [token_to_id.get(simplify_token(t), 0) for t in tokens]
tag_ids = [tag_to_id[t] for t in tags]
#### Now we define the computation to be performed with the model. Note that they are not applied yet, we are simply building the computation graph.
# Look up word embeddings
wembs = [dy.lookup(pEmbedding, w) for w in token_ids]
# Apply dropout
if train:
wembs = [dy.dropout(w, 1.0 - KEEP_PROB) for w in wembs]
# Feed words into the LSTM
#### Create an expression for two LSTMs and feed in the embeddings (reversed in one case).
#### We pull out the output vector from the cell state at each step.
f_init = f_lstm.initial_state()
f_lstm_output = [x.output() for x in f_init.add_inputs(wembs)]
rev_embs = reversed(wembs)
b_init = b_lstm.initial_state()
b_lstm_output = [x.output() for x in b_init.add_inputs(rev_embs)]
# For each output, calculate the output and loss
pred_tags = []
for f, b, t in zip(f_lstm_output, reversed(b_lstm_output), tag_ids):
# Combine the outputs
combined = dy.concatenate([f,b])
# Apply dropout
if train:
combined = dy.dropout(combined, 1.0 - KEEP_PROB)
# Matrix multiply to get scores for each tag
r_t = pOutput * combined
# Calculate cross-entropy loss
if train:
err = dy.pickneglogsoftmax(r_t, t)
#### We are not actually evaluating the loss values here, instead we collect them together in a list. This enables DyNet's <a href="http://dynet.readthedocs.io/en/latest/tutorials_notebooks/Autobatching.html">autobatching
loss_expressions.append(err)
# Calculate the highest scoring tag
#### This call to .npvalue() will lead to evaluation of the graph and so we don't actually get the benefits of autobatching. With some refactoring we could get the benefit back (simply keep the r_t expressions around and do this after the update), but that would have complicated this code.
chosen = np.argmax(r_t.npvalue())
pred_tags.append(chosen)
predicted.append(pred_tags)
# combine the losses for the batch, do an update, and record the loss
if train:
loss_for_batch = dy.esum(loss_expressions)
loss_for_batch.backward()
trainer.update()
loss += loss_for_batch.scalar_value()
####
# Update the number of correct tags and total tags
for (_, g), a in zip(batch, predicted):
total += len(g)
for gt, at in zip(g, a):
gt = tag_to_id[gt]
if gt == at:
match += 1
return loss, match / total
if __name__ == '__main__':
main() | python | 16 | 0.646133 | 395 | 51.428016 | 257 | starcoderdata |
def read_json(cls, file_path, default_value=None):
"""
Read the file and parse the content for json object
:param file_path:
:param default_value:
:return:
"""
txt = IOFunc.read_file(file_path, None)
if not txt:
return default_value
try:
return json.loads(txt)
except ValueError as ex:
print('Error: %s' % ex)
return default_value | python | 11 | 0.508368 | 59 | 24.666667 | 18 | inline |
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\Tipocontribuyente;
class TipocontribuyenteController extends Controller
{
public function indexByCliente(){
$tipo = Tipocontribuyente::where("tipo","C")->get();
return response()->json($tipo);
}
public function indexByProveedor(){
$tipo = Tipocontribuyente::where("tipo","P")->get();
return response()->json($tipo);
}
} | php | 12 | 0.666667 | 60 | 24.166667 | 18 | starcoderdata |
def __init__(self):
# retrieve labels for state
self.state_x_label = builder.get_object("state_x")
self.state_y_label = builder.get_object("state_y")
self.state_z_label = builder.get_object("state_z")
self.state_phi_label = builder.get_object("state_phi")
self.state_theta_label = builder.get_object("state_theta")
self.state_psi_label = builder.get_object("state_psi")
self.state_x_dot_label = builder.get_object("state_x_dot")
self.state_y_dot_label = builder.get_object("state_y_dot")
self.state_z_dot_label = builder.get_object("state_z_dot")
self.state_p_label = builder.get_object("state_p")
self.state_q_label = builder.get_object("state_q")
self.state_r_label = builder.get_object("state_r")
# refresh those values every 50 ms
GLib.timeout_add(50, self.updateState)
self.updateState() | python | 8 | 0.707879 | 60 | 44.888889 | 18 | inline |
//Requires
const modulename = 'WebServer:updateChecker';
const axios = require("axios");
const { dir, log, logOk, logWarn, logError } = require('../extras/console')(modulename);
//Helpers
const now = () => { return Math.round(Date.now() / 1000) };
const anyUndefined = (...args) => { return [...args].some(x => (typeof x === 'undefined')) };
/*
TODO:
Create an page with the changelog, that queries for the following endpoint and caches it for 15 minutes:
https://changelogs-live.fivem.net/api/changelog/versions/2385/2375?tag=server
Maybe even grab the data from commits:
https://changelogs-live.fivem.net/api/changelog/versions/2077
*/
module.exports = async () => {
try {
//perform request - cache busting every ~1.4h
let osTypeApiUrl = (GlobalData.osType == 'windows')? 'win32' : 'linux';
let cacheBuster = Math.floor(now() / 5e3);
let reqUrl = `https://changelogs-live.fivem.net/api/changelog/versions/${osTypeApiUrl}/server?${cacheBuster}`;
let changelogReq = await axios.get(reqUrl);
//check response
if(!changelogReq.data) throw new Error('request failed');
        const changelog = changelogReq.data;
if(anyUndefined(changelog.recommended, changelog.optional, changelog.latest, changelog.critical)){
throw new Error('expected values not found');
}
if(GlobalData.verbose) log(`Checked for updates. Latest version is ${changelog.latest}`);
//FIXME: CHECK FOR BROKEN ORDER
//fill in databus
let osTypeRepoUrl = (GlobalData.osType == 'windows')? 'server_windows' : 'proot_linux';
globals.databus.updateChecker = {
artifactsLink: `https://runtime.fivem.net/artifacts/fivem/build_${osTypeRepoUrl}/master/?${cacheBuster}`,
recommended: parseInt(changelog.recommended),
optional: parseInt(changelog.optional),
latest: parseInt(changelog.latest),
critical: parseInt(changelog.critical),
}
} catch (error) {
if(GlobalData.verbose) logWarn(`Failed to retrieve FXServer update data with error: ${error.message}`);
if(globals.databus.updateChecker === null) globals.databus.updateChecker = false;
}
} | javascript | 17 | 0.659658 | 118 | 45.591837 | 49 | starcoderdata |
package io.sphere.sdk.categories;
import java.util.function.Predicate;
class CategoryHasParentPredicate implements Predicate {
@Override
public boolean test(final Category category){
return category.getParent() != null;
}
} | java | 9 | 0.745098 | 65 | 24.6 | 10 | starcoderdata |
"""
Can be imported from `gin` to add directory and `grax/projects` to gin search path.
Example config file:
```gin
import grax.config
include "grax_config/single/fit.gin"
include "gat/configs/pubmed.gin"
```
"""
import os
import gin
base_dir = os.path.dirname(__file__)
for path in base_dir, os.path.join(base_dir, "projects"):
gin.add_config_file_search_path(path) | python | 6 | 0.70844 | 83 | 18.55 | 20 | starcoderdata |
'use strict';
const inquirer = require('inquirer');
const writeActionFile = require('./writeActionFile');
const starterPrompt = [
{
type: 'input',
name: 'owner',
message: 'Enter github user id:',
},
{
type: 'input',
name: 'repo',
message: 'Enter the repository name:',
},
];
module.exports = async function () {
let answers = await inquirer.prompt(starterPrompt);
const { owner, repo } = answers;
require('dotenv').config();
const { Octokit } = require('@octokit/rest');
const octokit = new Octokit({ auth: process.env.GH_PAT });
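  // List the files under .github/workflows in the chosen repository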
const response = await octokit.repos.getContent({
owner,
repo,
path: '.github/workflows',
});
const choosePrompt = {
type: 'checkbox',
name: 'workflows',
message: 'Choose workflow',
choices: response.data,
};
answers = await inquirer.prompt(choosePrompt);
const { workflows } = answers;
workflows.forEach((workflow) => {
octokit.repos
.getContent({
owner,
repo,
path: `.github/workflows/${workflow}`,
})
.then((response) => {
const ymlData = Buffer.from(response.data.content, 'base64');
console.log(ymlData.toString());
writeActionFile(workflow, ymlData);
});
});
}; | javascript | 23 | 0.611154 | 69 | 22.053571 | 56 | starcoderdata |
package net.kemitix.thorp.domain;
import net.kemitix.mon.TypeAlias;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class Sources extends TypeAlias<List<Path>> {
    private Sources(List<Path> value) { super(value); }
    public static final Sources emptySources = new Sources(Collections.emptyList());
    public static Sources create(List<Path> paths) {
        return new Sources(paths);
    }
    public List<Path> paths() {
return new ArrayList<>(getValue());
}
public Path forPath(Path path) {
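        // Return the first registered source directory that is a prefix of the given path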
return getValue().stream()
.filter(path::startsWith)
.findFirst()
.orElseThrow(() ->
new RuntimeException(
"Path is not within any known source"));
}
public Sources append(Path path) {
return append(Collections.singletonList(path));
}
    public Sources append(List<Path> paths) {
        List<Path> collected = new ArrayList<>();
collected.addAll(getValue());
collected.addAll(paths);
return Sources.create(collected);
}
} | java | 10 | 0.635285 | 93 | 33.162162 | 37 | starcoderdata |
func TestBigBlob(t *testing.T) {
env := solo.New(t, false, false)
ch := env.NewChain(nil, "chain1")
	// upload a blob that is too big
bigblobSize := governance.DefaultMaxBlobSize + 100
blobBin := make([]byte, bigblobSize)
_, err := ch.UploadWasm(ch.OriginatorKeyPair, blobBin)
require.Error(t, err)
	// update max blob size to allow for bigger blobs
_, err = ch.PostRequestSync(
solo.NewCallParams(
governance.Contract.Name, governance.FuncSetChainInfo.Name,
governance.ParamMaxBlobSize, bigblobSize,
).WithIotas(1),
nil,
)
require.NoError(t, err)
// blob upload must now succeed
_, err = ch.UploadWasm(ch.OriginatorKeyPair, blobBin)
require.NoError(t, err)
} | go | 13 | 0.717613 | 62 | 26.52 | 25 | inline |
// Fill out your copyright notice in the Description page of Project Settings.
#pragma once
#include "CoreMinimal.h"
#include "SlateCore/Public/Widgets/SPanel.h"
#include "Styling/SlateTypes.h"
#include "Misc/Attribute.h"
#include "Layout/Visibility.h"
#include "Layout/Margin.h"
#include "Layout/Geometry.h"
#include "Widgets/DeclarativeSyntaxSupport.h"
#include "SlotBase.h"
#include "Widgets/SWidget.h"
#include "Layout/Children.h"
#include "Widgets/SPanel.h"
#include "Widgets/Layout/Anchors.h"
#include "UMGExtensionDefine.h"
#include "Input/Reply.h"
#include "Framework/SlateDelegates.h"
struct FGeometry;
struct FPointerEvent;
struct FKeyEvent;
DECLARE_DELEGATE_OneParam(FOnToggleCheckStateChanged, ECheckBoxState);
/**
*
*/
class UMGEXTENTIONSAMPLE_API SMyToggle : public SPanel
{
public:
	class FSlot : public TSlotBase<FSlot>
{
public:
/** Offset */
		TAttribute<FMargin> OffsetAttr;
		/** Anchors */
		TAttribute<FAnchors> AnchorsAttr;
		/** Size */
		TAttribute<FVector2D> AlignmentAttr;
		/** Auto-Size */
		TAttribute<bool> AutoSizeAttr;
		/** Z-Order */
		TAttribute<int32> ZOrderAttr;
		/** StateBelonged */
		TAttribute<EToggleSlotType> SlotTypeAttr;
		FSlot()
			: TSlotBase<FSlot>()
, OffsetAttr(FMargin(0, 0, 1, 1))
, AnchorsAttr(FAnchors(0.0f, 0.0f))
, AlignmentAttr(FVector2D(0.5f, 0.5f))
, AutoSizeAttr(false)
, ZOrderAttr(0)
, SlotTypeAttr(EToggleSlotType::Other)
{
}
FSlot& Offset(const TAttribute InOffset)
{
OffsetAttr = InOffset;
return *this;
}
FSlot& Anchors(const TAttribute InAnchors)
{
AnchorsAttr = InAnchors;
return *this;
}
FSlot& Alignment(const TAttribute InAlignment)
{
AlignmentAttr = InAlignment;
return *this;
}
FSlot& AutoSize(const TAttribute InAutoSize)
{
AutoSizeAttr = InAutoSize;
return *this;
}
FSlot& ZOrder(const TAttribute InZOrder)
{
ZOrderAttr = InZOrder;
return *this;
}
FSlot& Expose(FSlot*& OutVarToInit)
{
OutVarToInit = this;
return *this;
}
FSlot& SlotType(const TAttribute InSlotType)
{
SlotTypeAttr = InSlotType;
return *this;
}
};
public:
SMyToggle();
SLATE_BEGIN_ARGS(SMyToggle)
: _IsToggleChecked(ECheckBoxState::Unchecked)
, _IsFocusable(true)
{
}
SLATE_SUPPORTS_SLOT(SMyToggle::FSlot)
SLATE_ATTRIBUTE(ECheckBoxState, IsToggleChecked)
SLATE_ARGUMENT(bool, IsFocusable)
SLATE_ATTRIBUTE(EButtonClickMethod::Type, ClickMethod)
SLATE_EVENT(FOnToggleCheckStateChanged, OnToggleCheckStateChanged)
SLATE_EVENT(FOnGetContent, OnGetMenuContent)
SLATE_END_ARGS()
void Construct(const FArguments& InArgs);
static FSlot& Slot()
{
return *(new FSlot());
}
FSlot& AddSlot()
{
Invalidate(EInvalidateWidget::Layout);
SMyToggle::FSlot& slot = *(new FSlot());
this->Children.Add(&slot);
return slot;
}
	void SetToggleIsChecked(TAttribute<ECheckBoxState> InIsToggleChecked);
	int32 RemoveSlot(const TSharedRef<SWidget> SlotWidget);
void ClearChildren();
bool IsPressed() const
{
return bIsPressed;
}
void ToggleCheckedState();
public:
// Begin SWidget overrides
virtual void OnArrangeChildren( const FGeometry& AllottedGeometry, FArrangedChildren& ArrangedChildren ) const override;
virtual int32 OnPaint( const FPaintArgs& Args, const FGeometry& AllottedGeometry, const FSlateRect& MyCullingRect, FSlateWindowElementList& OutDrawElements, int32 LayerId, const FWidgetStyle& InWidgetStyle, bool bParentEnabled ) const override;
virtual FChildren* GetChildren() override
{
return &Children;
}
virtual bool SupportsKeyboardFocus() const override;
virtual FReply OnKeyUp(const FGeometry& MyGeometry, const FKeyEvent& InKeyEvent) override;
virtual FReply OnMouseButtonDown(const FGeometry& MyGeometry, const FPointerEvent& MouseEvent) override;
virtual FReply OnMouseButtonDoubleClick(const FGeometry& InMyGeometry, const FPointerEvent& InMouseEvent) override;
virtual FReply OnMouseButtonUp(const FGeometry& MyGeometry, const FPointerEvent& MouseEvent) override;
virtual void OnMouseEnter(const FGeometry& MyGeometry, const FPointerEvent& MouseEvent) override;
virtual void OnMouseLeave(const FPointerEvent& MouseEvent) override;
virtual bool IsInteractable() const override;
// End SWidget overrides
protected:
// Begin SWidget overrides.
virtual FVector2D ComputeDesiredSize(float) const override;
// End SWidget overrides.
private:
typedef TArray<bool, TInlineAllocator FArrangedChildLayers;
void ArrangeLayeredChildren(const FGeometry& AllottedGeometry, FArrangedChildren& ArrangedChildren, FArrangedChildLayers& ArrangedChildLayers) const;
bool IsSameWithCheckState(const EToggleSlotType& SlotType) const;
protected:
	TPanelChildren<FSlot> Children;
	TAttribute<ECheckBoxState> IsToggleChecked;
FOnToggleCheckStateChanged OnToggleCheckStateChanged;
FOnGetContent OnGetMenuContent;
EButtonClickMethod::Type ClickMethod;
bool bIsFocusable;
bool bIsPressed;
}; | c | 19 | 0.735192 | 248 | 26.333333 | 189 | starcoderdata |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.Serialization.Formatters.Binary;
using Newtonsoft.Json;
using WFFM.ConversionTool.Library.Models.Metadata;
using WFFM.ConversionTool.Library.Repositories;
namespace WFFM.ConversionTool.Library.Providers
{
public class MetadataProvider : IMetadataProvider
{
private AppSettings _appSettings;
private IDestMasterRepository _destMasterRepository;
		private List<MetadataTemplate> _metadataTemplates = new List<MetadataTemplate>();
private string[] _metadataFiles;
public MetadataProvider(AppSettings appSettings, IDestMasterRepository destMasterRepository)
{
_appSettings = appSettings;
_destMasterRepository = destMasterRepository;
_metadataFiles = GetMetadataFileList();
foreach (string filePath in _metadataFiles)
{
var metadataTemplate = GetItemMetadataByFilePath(filePath);
if (destMasterRepository.ItemExists(metadataTemplate.destTemplateId))
{
_metadataTemplates.Add(metadataTemplate);
}
}
}
public MetadataTemplate GetItemMetadataByTemplateId(Guid templateId)
{
var newMetadataTemplates = DeepCopy(_metadataTemplates);
return newMetadataTemplates.FirstOrDefault(m => m.sourceTemplateId == templateId || m.destTemplateId == templateId);
}
public MetadataTemplate GetItemMetadataByTemplateName(string templateName)
{
var newMetadataTemplates = DeepCopy(_metadataTemplates);
return newMetadataTemplates.FirstOrDefault(m => string.Equals(m.sourceTemplateName, templateName, StringComparison.InvariantCultureIgnoreCase)
|| string.Equals(m.destTemplateName, templateName, StringComparison.InvariantCultureIgnoreCase));
}
public MetadataTemplate GetItemMetadataBySourceMappingFieldValue(string mappingValue)
{
var newMetadataTemplates = DeepCopy(_metadataTemplates);
return newMetadataTemplates.FirstOrDefault(m => string.Equals(m.sourceMappingFieldValue, mappingValue, StringComparison.InvariantCultureIgnoreCase));
}
public string[] GetAllMetadataFiles()
{
return _metadataFiles;
}
private string[] GetMetadataFileList()
{
var metadataDirPath = _appSettings.metadataFolderRelativePath;
return Directory.GetFiles(metadataDirPath, "*.json", SearchOption.AllDirectories);
}
private MetadataTemplate GetItemMetadataByFilePath(string filePath)
{
// Read json file
var itemMeta = System.IO.File.ReadAllText(filePath);
// Deserialize Json to Object
			MetadataTemplate metadataTemplate = JsonConvert.DeserializeObject<MetadataTemplate>(itemMeta);
if (string.IsNullOrEmpty(metadataTemplate.baseTemplateMetadataFileName)) return metadataTemplate;
var baseTemplateMetadataFilePath = _metadataFiles.FirstOrDefault(f => GetFileName(f).Equals(metadataTemplate.baseTemplateMetadataFileName, StringComparison.InvariantCultureIgnoreCase));
var fullMetadataTemplate = MergeBaseMetadataTemplate(metadataTemplate, baseTemplateMetadataFilePath);
return fullMetadataTemplate;
}
private string GetFileName(string filePath)
{
var filename = filePath.Split('\\').Last();
return filename;
}
private MetadataTemplate MergeBaseMetadataTemplate(MetadataTemplate metadataTemplate, string baseTemplateMetadataFilePath)
{
if (string.IsNullOrEmpty(baseTemplateMetadataFilePath)) return metadataTemplate;
MetadataTemplate baseTemplateMeta = GetItemMetadataByFilePath(baseTemplateMetadataFilePath);
if (baseTemplateMeta == null) return metadataTemplate;
// Iterate merging if baseTemplate filename is not null
if (!string.IsNullOrEmpty(baseTemplateMeta.baseTemplateMetadataFileName))
{
var filePath = _metadataFiles.FirstOrDefault(f => f.IndexOf(baseTemplateMeta.baseTemplateMetadataFileName, StringComparison.InvariantCultureIgnoreCase) > -1);
baseTemplateMeta = MergeBaseMetadataTemplate(baseTemplateMeta, filePath);
}
// Merge Fields
metadataTemplate.fields = MergeFields(baseTemplateMeta.fields, metadataTemplate.fields);
// Merge Descendant Items
metadataTemplate.descendantItems =
MergeDescendantItems(baseTemplateMeta.descendantItems, metadataTemplate.descendantItems);
// Override dataValueType
if (string.IsNullOrEmpty(metadataTemplate.dataValueType))
{
metadataTemplate.dataValueType = baseTemplateMeta.dataValueType;
}
// Override dataValueConverter
if (string.IsNullOrEmpty(metadataTemplate.dataValueConverter))
{
metadataTemplate.dataValueConverter = baseTemplateMeta.dataValueConverter;
}
return metadataTemplate;
}
private List MergeDescendantItems(
List baseDescendantItems, List metaDescendantItems)
{
if (baseDescendantItems != null)
{
if (metaDescendantItems != null)
{
foreach (var baseDescendantItem in baseDescendantItems)
{
if (!metaDescendantItems.Any(f =>
string.Equals(f.itemName, baseDescendantItem.itemName, StringComparison.InvariantCultureIgnoreCase)
&& string.Equals(f.destTemplateName, baseDescendantItem.destTemplateName,
StringComparison.InvariantCultureIgnoreCase)
&& string.Equals(f.parentItemName, baseDescendantItem.parentItemName)
&& f.isParentChild == baseDescendantItem.isParentChild))
{
metaDescendantItems.Add(baseDescendantItem);
}
}
}
else
{
return baseDescendantItems;
}
}
return metaDescendantItems;
}
private MetadataTemplate.MetadataFields MergeFields(MetadataTemplate.MetadataFields baseFields,
MetadataTemplate.MetadataFields metaFields)
{
// Add base fields
if (baseFields.newFields != null)
{
if (metaFields.newFields != null)
{
foreach (var newField in baseFields.newFields)
{
if (metaFields.newFields.All(f => f.destFieldId != newField.destFieldId))
{
metaFields.newFields.Add(newField);
}
}
}
else
{
metaFields.newFields = baseFields.newFields;
}
}
if (baseFields.convertedFields != null)
{
if (metaFields.convertedFields != null)
{
foreach (var convertedField in baseFields.convertedFields)
{
// Check if metadataTemplate contains it already
var metaConvertedField = metaFields.convertedFields.FirstOrDefault(cf => cf.sourceFieldId == convertedField.sourceFieldId);
if (metaConvertedField != null)
{
if (metaConvertedField.destFields != null && metaConvertedField.destFields.Any())
{
foreach (var convertedFieldDestField in convertedField.destFields)
{
if (metaConvertedField.destFields.All(df => !string.Equals(df.sourceElementName, convertedFieldDestField.sourceElementName, StringComparison.InvariantCultureIgnoreCase)))
{
metaConvertedField.destFields.Add(convertedFieldDestField);
}
}
}
else if (metaConvertedField.destFieldId == null)
{
metaConvertedField.destFields = convertedField.destFields;
}
}
else
{
metaFields.convertedFields.Add(convertedField);
}
}
}
else
{
metaFields.convertedFields = baseFields.convertedFields;
}
}
if (baseFields.existingFields != null)
{
if (metaFields.existingFields != null)
{
foreach (var newField in baseFields.existingFields)
{
if (metaFields.existingFields.All(f => f.fieldId != newField.fieldId))
{
metaFields.existingFields.Add(newField);
}
}
}
else
{
metaFields.existingFields = baseFields.existingFields;
}
}
return metaFields;
}
		public T DeepCopy<T>(T item)
{
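			// Round-trip the object through a BinaryFormatter to produce an independent deep copy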
BinaryFormatter formatter = new BinaryFormatter();
MemoryStream stream = new MemoryStream();
formatter.Serialize(stream, item);
stream.Seek(0, SeekOrigin.Begin);
T result = (T)formatter.Deserialize(stream);
stream.Close();
return result;
}
}
} | c# | 29 | 0.719606 | 188 | 32.400826 | 242 | starcoderdata |
package bio.terra.workspace.service.resource.referenced;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class ReferencedDataRepoSnapshotAttributes {
private final String instanceName;
private final String snapshotId;
@JsonCreator
public ReferencedDataRepoSnapshotAttributes(
@JsonProperty("instanceName") String instanceName,
@JsonProperty("snapshotId") String snapshotId) {
this.instanceName = instanceName;
this.snapshotId = snapshotId;
}
public String getInstanceName() {
return instanceName;
}
public String getSnapshotId() {
return snapshotId;
}
} | java | 10 | 0.771982 | 56 | 25.84 | 25 | starcoderdata |
void RemoveFewConstraints::buildSubproblems()
{
subproblems.emplace_back(alive.size(), instance);
VertexColorer& subproblem = subproblems.front();
for (const VertexColorer::Link& link : instance.constraintIterable())
{
const uint32_t a = link.first;
const uint32_t b = link.second;
if (!toBePostProcessed[a] && !toBePostProcessed[b])
subproblem.addConstraint(newIndex[a], newIndex[b]);
}
for (const VertexColorer::Link& link : instance.positiveWeightFriendshipIterable())
{
const uint32_t a = link.first;
const uint32_t b = link.second;
assert(!toBePostProcessed[a] && !toBePostProcessed[b]);
subproblem.addFriendship(link.weight, newIndex[a], newIndex[b]);
}
for (const std::vector<VertexColorer::Link>& edge : instance.groupedLinks)
{
subproblem.addNewEdge();
for (const VertexColorer::Link& link : edge)
{
const uint32_t a = link.first;
const uint32_t b = link.second;
//TODO: reintroduce this, maybe
assert(!toBePostProcessed[a] && !toBePostProcessed[b]); //This is guaranteed since no friendships are allowed in this reduction
subproblem.addOnEdge(newIndex[a], newIndex[b]);
}
}
} | c++ | 14 | 0.72232 | 130 | 33.515152 | 33 | inline |
import {
CREATE_RIDE_SUCCESS,
CREATE_RIDE_LOADING,
CREATE_RIDE_ERROR
} from "../actionTypes/createRide";
import initialState from "../store/initialState";
const createRide = (state = initialState.createRide, action) => {
switch (action.type) {
case CREATE_RIDE_SUCCESS:
return {
...state,
message: action.payload
};
case CREATE_RIDE_LOADING:
return {
...state,
loading: action.payload
};
case CREATE_RIDE_ERROR:
return {
...state,
error: action.payload
};
default:
return state;
}
};
export default createRide; | javascript | 12 | 0.596825 | 65 | 19.322581 | 31 | starcoderdata |
<?php
namespace FlatlandGame\Controller;
use FlatlandGame\Model;
use Omelettes\Controller\QuantumController;
class GamesController extends QuantumController
{
protected $addQuantumFilterClass = '';
protected $addQuantumFormClass = '';
protected $editQuantumFilterClass = '';
protected $editQuantumFormClass = '';
protected $quantumMapperClass = 'FlatlandGame\Model\GamesMapper';
protected $quantumModelClass = 'FlatlandGame\Model\Game';
public function getGameNavigationConfig(Model\Game $model)
{
$routeOptions = $model->slug ? array('slug' => $model->slug) : array('key' => $model->key);
$config = array(
array(
'label' => 'Info',
'route' => 'games/game',
'routeOptions' => $routeOptions,
),
array(
'label' => 'Forum',
'route' => 'games/game/forum',
'routeOptions' => $routeOptions,
),
array(
'label' => 'Lists',
'route' => 'games/game/lists',
'routeOptions' => $routeOptions,
),
);
return $config;
}
public function viewAction()
{
$game = $this->findRequestedModel();
if (!$game) {
return $this->redirect()->toRoute($this->getRouteName());
}
return $this->returnViewModel( array(
'model' => $game,
'crud' => $this->constructNavigation($this->getViewNavigationConfig($game)),
'modelNav' => $this->constructNavigation($this->getGameNavigationConfig($game)),
));
}
} | php | 18 | 0.650465 | 93 | 22.711864 | 59 | starcoderdata |
/*************************************************************************************
* qTESLA: an efficient post-quantum signature scheme based on the R-LWE problem
*
* Abstract: CDT constants for the Gaussian sampler
**************************************************************************************/
#ifndef CDTSAMP
#define CDTSAMP
#include <stdint.h>
#include "params.h"
// Sigma = 22.93, 64-bit precision
#define CDT_ROWS 209
#define CDT_COLS 1
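// Each entry approximates Pr(|x| < row index) for the discrete Gaussian (sigma = 22.93), scaled to 63-bit fixed point (0x7FFF...F ~ 1.0)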
static const int64_t cdt_v[CDT_ROWS*CDT_COLS] = {
0x0000000000000000LL, // 0
0x023A1B3F94933202LL, // 1
0x06AD3C4C19410B24LL, // 2
0x0B1D1E95803CBB73LL, // 3
0x0F879D85E7AB7F6FLL, // 4
0x13EA9C5C52732915LL, // 5
0x18440933FFD2011BLL, // 6
0x1C91DFF191E15D07LL, // 7
0x20D22D0F2017900DLL, // 8
0x25031040C1E626EFLL, // 9
0x2922BEEBA163019DLL, // 10
0x2D2F866A3C5122D3LL, // 11
0x3127CE192059EF64LL, // 12
0x350A1928231CB01ALL, // 13
0x38D5082CD4FCC414LL, // 14
0x3C875A73B33ADA6BLL, // 15
0x401FEF0E67CD47D3LL, // 16
0x439DC59E3077B59CLL, // 17
0x46FFFEDA4FC0A316LL, // 18
0x4A45DCD32E9CAA91LL, // 19
0x4D6EC2F3922E5C24LL, // 20
0x507A35C1FB354670LL, // 21
0x5367DA64EA5F1C63LL, // 22
0x563775ED5B93E26ELL, // 23
0x58E8EC6B50CB95F8LL, // 24
0x5B7C3FD0B999197DLL, // 25
0x5DF18EA7664D810ELL, // 26
0x6049129F03B5CD6DLL, // 27
0x62831EF856A48427LL, // 28
0x64A01ED314BA206FLL, // 29
0x66A09363CA89DAA3LL, // 30
0x688512173EF213F5LL, // 31
0x6A4E42A8B137E138LL, // 32
0x6BFCDD302C5B888ALL, // 33
0x6D91A82DF797EAB8LL, // 34
0x6F0D7697EBA6A51DLL, // 35
0x707125ED27F05CF1LL, // 36
0x71BD9C544C184D8DLL, // 37
0x72F3C6C7FB380322LL, // 38
0x74149755088E5CC6LL, // 39
0x7521036D434271D4LL, // 40
0x761A02516A02B0CELL, // 41
0x77008B9461817A43LL, // 42
0x77D595B95BC6A0FELL, // 43
0x789A14EE338BB727LL, // 44
0x794EF9E2D7C53213LL, // 45
0x79F530BE414FE24DLL, // 46
0x7A8DA03110886732LL, // 47
0x7B1928A59B3AA79ELL, // 48
0x7B98A38CE58D06AELL, // 49
0x7C0CE2C7BAD3164ALL, // 50
0x7C76B02ADDE64EF2LL, // 51
0x7CD6CD1D13EE98F2LL, // 52
0x7D2DF24DA06E2473LL, // 53
0x7D7CCF81A5CD98B9LL, // 54
0x7DC40B76C24FB5D4LL, // 55
0x7E0443D92DE22661LL, // 56
0x7E3E0D4B91401720LL, // 57
0x7E71F37EC9C1DE8DLL, // 58
0x7EA07957CE6B9051LL, // 59
0x7ECA1921F1AF6404LL, // 60
0x7EEF44CBC73DA35BLL, // 61
0x7F10662D0574233DLL, // 62
0x7F2DDF53CDDCD427LL, // 63
0x7F480AD7DF028A76LL, // 64
0x7F5F3C324B0F66B2LL, // 65
0x7F73C018698C18A7LL, // 66
0x7F85DCD8D69F8939LL, // 67
0x7F95D2B96ED3DA10LL, // 68
0x7FA3DC55532D71BBLL, // 69
0x7FB02EFA1DDDC61ELL, // 70
0x7FBAFB038BAE76E4LL, // 71
0x7FC46C34F918B3E3LL, // 72
0x7FCCAA102B95464CLL, // 73
0x7FD3D828F7D49092LL, // 74
0x7FDA16756C11CF83LL, // 75
0x7FDF819A3A7BFE69LL, // 76
0x7FE4333332A5FEBDLL, // 77
0x7FE84217AA0DE2B3LL, // 78
0x7FEBC29AC3100A8BLL, // 79
0x7FEEC6C78F0D514ELL, // 80
0x7FF15E9914396F2ALL, // 81
0x7FF3982E4982FB97LL, // 82
0x7FF57FFA236862D1LL, // 83
0x7FF720EFD36F4850LL, // 84
0x7FF884AB61732BC7LL, // 85
0x7FF9B396CA3B383CLL, // 86
0x7FFAB50BD1DD3633LL, // 87
0x7FFB8F72BA84114BLL, // 88
0x7FFC485E115A3388LL, // 89
0x7FFCE4A3C3B92B98LL, // 90
0x7FFD6873AE755E4ALL, // 91
0x7FFDD76BD840FDA1LL, // 92
0x7FFE34AA86CE6870LL, // 93
0x7FFE82DE5CA6A885LL, // 94
0x7FFEC454ABAA26DFLL, // 95
0x7FFEFB0625FADB89LL, // 96
0x7FFF28A214B1160FLL, // 97
0x7FFF4E983945429DLL, // 98
0x7FFF6E217C168A6ALL, // 99
0x7FFF884787F2B986LL, // 100
0x7FFF9DEB70088602LL, // 101
0x7FFFAFCB7B419E48LL, // 102
0x7FFFBE882DABB8F8LL, // 103
0x7FFFCAA8A65BDA07LL, // 104
0x7FFFD49E66188754LL, // 105
0x7FFFDCC891191605LL, // 106
0x7FFFE376BC4B0583LL, // 107
0x7FFFE8EB54D33209LL, // 108
0x7FFFED5DAEE78F4ELL, // 109
0x7FFFF0FBC7A6933DLL, // 110
0x7FFFF3EBC43A9213LL, // 111
0x7FFFF64D375FC4CCLL, // 112
0x7FFFF83A354A0431LL, // 113
0x7FFFF9C83CE9BB0DLL, // 114
0x7FFFFB08FCAC61A6LL, // 115
0x7FFFFC0AF80A1A6FLL, // 116
0x7FFFFCDA127DDE76LL, // 117
0x7FFFFD8003E62E56LL, // 118
0x7FFFFE04B9BF9C5BLL, // 119
0x7FFFFE6EA82EF9BDLL, // 120
0x7FFFFEC30D64CD46LL, // 121
0x7FFFFF0629856684LL, // 122
0x7FFFFF3B6CEEE3F1LL, // 123
0x7FFFFF659E6F7BA6LL, // 124
0x7FFFFF86FAC1036ALL, // 125
0x7FFFFFA14E69EDE9LL, // 126
0x7FFFFFB60AF6ACB7LL, // 127
0x7FFFFFC65857AECFLL, // 128
0x7FFFFFD3230F314FLL, // 129
0x7FFFFFDD27BE0A17LL, // 130
0x7FFFFFE4FC86CDFFLL, // 131
0x7FFFFFEB18AA9E4CLL, // 132
0x7FFFFFEFDAB1FD73LL, // 133
0x7FFFFFF38D65D499LL, // 134
0x7FFFFFF66BD0EB8CLL, // 135
0x7FFFFFF8A4782371LL, // 136
0x7FFFFFFA5BEF7C27LL, // 137
0x7FFFFFFBAEEB0B4CLL, // 138
0x7FFFFFFCB3E55903LL, // 139
0x7FFFFFFD7C6FE192LL, // 140
0x7FFFFFFE163E99E3LL, // 141
0x7FFFFFFE8BFC2558LL, // 142
0x7FFFFFFEE5F1CE80LL, // 143
0x7FFFFFFF2A8C31FDLL, // 144
0x7FFFFFFF5EC3CD18LL, // 145
0x7FFFFFFF866F376BLL, // 146
0x7FFFFFFFA483A906LL, // 147
0x7FFFFFFFBB4780C4LL, // 148
0x7FFFFFFFCC79BEB2LL, // 149
0x7FFFFFFFD970CBE1LL, // 150
0x7FFFFFFFE3326D21LL, // 151
0x7FFFFFFFEA865AB8LL, // 152
0x7FFFFFFFF004A7C8LL, // 153
0x7FFFFFFFF420E4F9LL, // 154
0x7FFFFFFFF732B791LL, // 155
0x7FFFFFFFF97C764FLL, // 156
0x7FFFFFFFFB303DDDLL, // 157
0x7FFFFFFFFC73D5A3LL, // 158
0x7FFFFFFFFD63AA57LL, // 159
0x7FFFFFFFFE15140DLL, // 160
0x7FFFFFFFFE981196LL, // 161
0x7FFFFFFFFEF89992LL, // 162
0x7FFFFFFFFF3F9A0CLL, // 163
0x7FFFFFFFFF73BA0BLL, // 164
0x7FFFFFFFFF99EBBBLL, // 165
0x7FFFFFFFFFB5DAA0LL, // 166
0x7FFFFFFFFFCA3E7BLL, // 167
0x7FFFFFFFFFD91985LL, // 168
0x7FFFFFFFFFE3E70ALL, // 169
0x7FFFFFFFFFEBBE45LL, // 170
0x7FFFFFFFFFF16C5CLL, // 171
0x7FFFFFFFFFF587BELL, // 172
0x7FFFFFFFFFF87E7FLL, // 173
0x7FFFFFFFFFFAA108LL, // 174
0x7FFFFFFFFFFC29F5LL, // 175
0x7FFFFFFFFFFD43E8LL, // 176
0x7FFFFFFFFFFE0DD7LL, // 177
0x7FFFFFFFFFFE9E31LL, // 178
0x7FFFFFFFFFFF0530LL, // 179
0x7FFFFFFFFFFF4E88LL, // 180
0x7FFFFFFFFFFF82AALL, // 181
0x7FFFFFFFFFFFA7A6LL, // 182
0x7FFFFFFFFFFFC1D6LL, // 183
0x7FFFFFFFFFFFD458LL, // 184
0x7FFFFFFFFFFFE166LL, // 185
0x7FFFFFFFFFFFEA97LL, // 186
0x7FFFFFFFFFFFF10CLL, // 187
0x7FFFFFFFFFFFF594LL, // 188
0x7FFFFFFFFFFFF8C0LL, // 189
0x7FFFFFFFFFFFFAF7LL, // 190
0x7FFFFFFFFFFFFC83LL, // 191
0x7FFFFFFFFFFFFD96LL, // 192
0x7FFFFFFFFFFFFE56LL, // 193
0x7FFFFFFFFFFFFEDALL, // 194
0x7FFFFFFFFFFFFF36LL, // 195
0x7FFFFFFFFFFFFF75LL, // 196
0x7FFFFFFFFFFFFFA1LL, // 197
0x7FFFFFFFFFFFFFBFLL, // 198
0x7FFFFFFFFFFFFFD4LL, // 199
0x7FFFFFFFFFFFFFE2LL, // 200
0x7FFFFFFFFFFFFFECLL, // 201
0x7FFFFFFFFFFFFFF2LL, // 202
0x7FFFFFFFFFFFFFF7LL, // 203
0x7FFFFFFFFFFFFFFALL, // 204
0x7FFFFFFFFFFFFFFCLL, // 205
0x7FFFFFFFFFFFFFFDLL, // 206
0x7FFFFFFFFFFFFFFELL, // 207
0x7FFFFFFFFFFFFFFFLL, // 208
}; // cdt_v
// memory requirements:
// 512 samples: 8652 bytes
// 256 samples: 5580 bytes
// 128 samples: 4044 bytes
// 64 samples: 3276 bytes
// 32 samples: 2892 bytes
// table alone: 1672 bytes
#endif | c | 7 | 0.676045 | 87 | 30.695833 | 240 | starcoderdata |
<?php include '../includes/header.php'; ?>
<div class="container-fluid">
<header class="bgimage enmubgimage">
<a href="../index.php"><img id="LogoImg" src="../images/NMCollegeTransfer2.png" class="img-responsive" alt="Responsive Image">
<nav class="navbar navbar-inverse">
<div class="container-fluid">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#myNavbar">
<span class="icon-bar">
<span class="icon-bar">
<span class="icon-bar">
<a class="navbar-brand enmu" href="index.php">EASTERN NEW MEXICO UNIVERSITY
<?php //Begin top Nav Bar ?>
<?php require '../includes/schooltopnav.php'; ?>
<?php //End Top Nav Bar ?>
<div id="breadcrumb">
<ol class="breadcrumb">
<a href="../index.php">HOME
<a href="index.php">ENMU
ADMISSIONS
<div class="container-fluid">
<div class="textbody">
<a href="../questions.php#questions"><img class="question" src="../images/questionmark.png" alt="questionmark image">
are some important things to consider about ENMU. We have provided you with quick links to visit the ENMU webpages for more information.
<h4 id="requirements">Requirements
admission application, there is no application fee (apply online or download an application) FAFSA application and scholarship application, ACT or SAT scores (Eastern's ACT code is 2636 and the SAT code is 4299). copy of your official high school transcript or GED certificate showing your completion date.
<div class="subtitle">Send your materials to:<br />
Office of Enrollment Services<br />
ENMU Station 7<br />
1500 S Ave K<br />
Portales, NM 88130
<a href="http://www.enmu.edu/future-students/transfer/">Admission Requirements | <a href="http://www.enmu.edu/future-students/applyNow.html">Application
<h4 id="eligibility">Eligibility
need a college GPA of at least 2.0 for regular admission. If you have fewer than 30 hours of college credit, you also need a high school GPA of 2.5 or ACT of 17 or SAT of 810.
Visit: <a href="http://www.enmu.edu/future-students/faq/ ">ENMU Admissions FAQs
<h4 id="deadlines">Deadlines
should apply for admission at least 30 days before the start of your first semester, but you don't have to wait. We recommend that you apply online as soon as possible.
Visit: <a href="http://www.enmu.edu/future-students/faq/admission.shtml ">ENMU Admissions Deadlines
<h4 id="fees">Fees
to ENMU is free.
Visit: <a href="http://www.enmu.edu/future-students/faq/admission.shtml">ENMU Admissions Fees
<h4 id="visits">Campus Visits
feel the spirit of Eastern, first-hand. Be our guest for a guided tour of the campus with our President’s Ambassadors. Talk with current students to find out more about ENMU and why they chose Eastern as their college home.
During your visit, you may have the option to eat at the Ultimate Dining hall, stop by the Ground Zero Coffee Shop and get a feel for what Eastern has to offer you. You will find that Eastern is the place you’ll want to spend your college days.
Visit: <a href="http://www.enmu.edu/future-students/visit/">Schedule a Visit
<?php //Begin Side menu ?>
<?php require '../includes/sidemenu.php'; ?>
<?php //End Side menu ?>
<div class="clear">
<div class="container">
<?php include '../includes/schoolfooter.php'; ?> | php | 4 | 0.600704 | 335 | 50.95122 | 82 | starcoderdata |
//
// LWRereshAnimationDemo.h
// LWToolsDemo
//
// Created by weil on 2018/5/22.
//  Copyright © 2018 weil. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface LWRereshAnimationDemo : UIViewController
@end | c | 4 | 0.728 | 51 | 16.857143 | 14 | starcoderdata |
def set_dev(serial: str, mode: bool):
"""Make boot files availabe or not-available"""
if serial not in devices:
raise HTTPException(status_code=404, detail=f"Device {serial} not in database")
if mode:
o = os.system("mount /srv/tftpboot/{serial}")
else:
o = os.system("unmount /srv/tftpboot/{serial}")
if o != 0:
raise HTTPException(status_code=404, detail=f"Failed to mount/umount device")
devices[serial] = mode
return {"status": True} | python | 11 | 0.641129 | 87 | 40.416667 | 12 | inline |
package org.jeecg.modules.system.util;
import java.util.List;
import org.jeecg.common.util.oConvertUtils;
import org.jeecg.modules.system.entity.SysPermission;
/**
* @Author: scott
* @Date: 2019-04-03
*/
public class PermissionDataUtil {
/**
	 * Intelligently clean up bad input data to smooth over common user mistakes
*
* @param permission
*/
public static SysPermission intelligentProcessData(SysPermission permission) {
if (permission == null) {
return null;
}
		// Component path
if (oConvertUtils.isNotEmpty(permission.getComponent())) {
String component = permission.getComponent();
if (component.startsWith("/")) {
component = component.substring(1);
}
if (component.startsWith("views/")) {
component = component.replaceFirst("views/", "");
}
if (component.startsWith("src/views/")) {
component = component.replaceFirst("src/views/", "");
}
if (component.endsWith(".vue")) {
component = component.replace(".vue", "");
}
permission.setComponent(component);
}
		// Request URL
if (oConvertUtils.isNotEmpty(permission.getUrl())) {
String url = permission.getUrl();
if (url.endsWith(".vue")) {
url = url.replace(".vue", "");
}
if (!url.startsWith("http") && !url.startsWith("/")&&!url.trim().startsWith("{{")) {
url = "/" + url;
}
permission.setUrl(url);
}
		// Top-level menu: default component
		if (0 == permission.getMenuType() && oConvertUtils.isEmpty(permission.getComponent())) {
			// Use the default component for a top-level menu
permission.setComponent("layouts/RouteView");
}
return permission;
}
/**
* 如果没有index页面 需要new 一个放到list中
* @param metaList
*/
	public static void addIndexPage(List<SysPermission> metaList) {
boolean hasIndexMenu = false;
for (SysPermission sysPermission : metaList) {
if("".equals(sysPermission.getName())) {
hasIndexMenu = true;
break;
}
}
if(!hasIndexMenu) {
metaList.add(0,new SysPermission(true));
}
}
/**
* 判断是否授权首页
* @param metaList
* @return
*/
	public static boolean hasIndexPage(List<SysPermission> metaList){
boolean hasIndexMenu = false;
for (SysPermission sysPermission : metaList) {
if("".equals(sysPermission.getName())) {
hasIndexMenu = true;
break;
}
}
return hasIndexMenu;
}
} | java | 15 | 0.651896 | 90 | 22.042105 | 95 | starcoderdata |
package forestry.api.core;
import java.util.ArrayList;
/**
* Used mostly by hives to determine whether they can spawn at a certain
* position. Rather limited and hackish.
 * @deprecated there are better ways now
*/
@Deprecated
public class GlobalManager {
/**
* @deprecated use Block.isGenMineableReplaceable(), anything that accepts
* dirt will be accepted
*/
@Deprecated
	public static ArrayList<Integer> dirtBlockIds = new ArrayList<Integer>();
/**
* @deprecated use Block.isGenMineableReplaceable(), anything that accepts
* sand will be accepted
*/
@Deprecated
	public static ArrayList<Integer> sandBlockIds = new ArrayList<Integer>();
/**
* @deprecated why is this needed?
*/
@Deprecated
	public static ArrayList<Integer> snowBlockIds = new ArrayList<Integer>();
/**
* @deprecated Ensure your block's isLeaves function returns true instead.
*/
@Deprecated
	public static ArrayList<Integer> leafBlockIds = new ArrayList<Integer>();
} | java | 8 | 0.737924 | 75 | 26.8 | 35 | starcoderdata |
""" Color based K-means"""
import numpy as np
import cv2
import os
import glob
from glob import glob
from PIL import Image
from matplotlib import pyplot as plt
import pdb
heatMap_image_path = '/Users/monjoysaha/Downloads/CT_lung_segmentation-master/check/test/'
save_path = '/Users/monjoysaha/Downloads/CT_lung_segmentation-master/check/only_GGO/'
g= glob(heatMap_image_path + "/*.png")
#
for image in g:
fname_image = os.path.basename(image)
img = cv2.imread(image)
Z = np.float32(img.reshape((-1,3)))
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
K = 4
_,labels,centers = cv2.kmeans(Z, K, None, criteria, 10, cv2.KMEANS_RANDOM_CENTERS)
labels = labels.reshape((img.shape[:-1]))
reduced = np.uint8(centers)[labels]
for i, c in enumerate(centers):
mask = cv2.inRange(labels, i, i)
mask = np.dstack([mask]*3) # Make it 3 channel
ex_img = cv2.bitwise_and(img, mask)
ex_reduced = cv2.bitwise_and(reduced, mask)
hsv = cv2.cvtColor(ex_reduced, cv2.COLOR_BGR2HSV)
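        # Note: despite the "red" variable names, this hue range (110-130) selects blue tones in OpenCV's HSV space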
lower_red = np.array([110,50,50])
upper_red = np.array([130,255,255])
mask1 = cv2.inRange(hsv, lower_red, upper_red)
#if cv2.inRange(hsv, lower_red, upper_red)
res = cv2.bitwise_and(reduced, reduced, mask= mask1)
#print(mask1)
#if res > 0:
#plt.imshow(res)
#plt.show()
#pdb.set_trace()
cv2.imwrite(save_path+fname_image, res) | python | 11 | 0.675232 | 90 | 32.357143 | 42 | starcoderdata |
using System.Collections.Generic;
using System.Linq;
namespace Atata
{
///
/// Represents the value provider class that wraps enumerable of <see cref="DirectorySubject"/> objects and is hosted in <typeparamref name="TOwner"/> object.
///
/// <typeparam name="TOwner">The type of the owner.
public class DirectoryEnumerableProvider : EnumerableValueProvider<DirectorySubject, TOwner>
{
///
/// Initializes a new instance of the <see cref="DirectoryEnumerableProvider{TOwner}"/> class.
///
/// <param name="owner">The owner.
/// <param name="objectSource">The object source.
/// <param name="providerName">Name of the provider.
public DirectoryEnumerableProvider(
TOwner owner,
IObjectSource objectSource,
string providerName)
: base(owner, objectSource, providerName)
{
}
///
/// Gets the directory names.
///
public EnumerableValueProvider<ValueProvider<string, DirectorySubject>, TOwner> Names =>
this.Query(nameof(Names), q => q.Select(x => x.Name));
///
/// Gets the <see cref="DirectorySubject"/> for the directory with the specified name.
///
///
/// The <see cref="DirectorySubject"/>.
///
/// <param name="directoryName">Name of the directory.
/// <see cref="DirectorySubject"/> instance.
public DirectorySubject this[string directoryName]
{
get
{
var item = Value.First(x => x.Name == directoryName);
item.ProviderName = $"[\"{directoryName}\"]";
return item;
}
}
}
} | c# | 19 | 0.587904 | 162 | 36.519231 | 52 | starcoderdata |
function () {
assert.doesNotThrow(function () {
var src = fs.readFileSync(outFile).toString()
, map = fs.readFileSync(outMapFile).toString();
// Ensures that the map is a valid one
validate(src, map);
// This ensures that the mapFile argument appears in the src as the comment
assert.ok(src.indexOf(mapFile) >= 0, 'The map argument should have been used');
// If paths were compressed, then this path should never appear in the map
assert.ok(map.indexOf(compressPath) < 0, 'The compressPath option should have been used');
}, 'The bundle should have a valid external sourcemap');
next();
} | javascript | 15 | 0.666667 | 96 | 37.705882 | 17 | inline |
package handlers;
import enums.DecryptEnums;
import enums.EncryptEnums;
public interface EncryptAndDecrypHandler {
String textToBeEncrypted(String decrypted, EncryptEnums encryptEnums);
String textToBeDecrypted(String encrypted, DecryptEnums decryptEnums);
} | java | 6 | 0.825926 | 74 | 26 | 10 | starcoderdata |
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.guided.dtable.client.wizard.column.plugins;
import java.util.ArrayList;
import java.util.List;
import com.google.gwtmockito.GwtMockitoTestRunner;
import com.google.gwtmockito.WithClassesToStub;
import org.drools.workbench.models.datamodel.rule.BaseSingleFieldConstraint;
import org.drools.workbench.models.datamodel.rule.FactPattern;
import org.drools.workbench.models.datamodel.rule.IPattern;
import org.drools.workbench.models.datamodel.rule.RuleModel;
import org.drools.workbench.models.datamodel.rule.SingleFieldConstraint;
import org.drools.workbench.models.guided.dtable.shared.model.BRLConditionColumn;
import org.drools.workbench.models.guided.dtable.shared.model.BRLConditionVariableColumn;
import org.drools.workbench.models.guided.dtable.shared.model.BRLRuleModel;
import org.drools.workbench.models.guided.dtable.shared.model.DTCellValue52;
import org.drools.workbench.models.guided.dtable.shared.model.GuidedDecisionTable52;
import org.drools.workbench.screens.guided.dtable.client.widget.table.GuidedDecisionTableView;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.pages.AdditionalInfoPage;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.pages.DefaultValuesPage;
import org.drools.workbench.screens.guided.dtable.client.wizard.column.pages.RuleModellerPage;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.uberfire.ext.widgets.core.client.wizards.WizardPageStatusChangeEvent;
import org.uberfire.mocks.EventSourceMock;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
@RunWith(GwtMockitoTestRunner.class)
@WithClassesToStub(BRLRuleModel.class)
public class BRLConditionColumnPluginOperatorTest {
@Captor
    private ArgumentCaptor<List<BRLConditionVariableColumn>> listArgumentCaptor;
@Mock
private RuleModellerPage ruleModellerPage;
@Mock
private AdditionalInfoPage additionalInfoPage;
@Mock
    private EventSourceMock<WizardPageStatusChangeEvent> changeEvent;
@Mock
private GuidedDecisionTableView.Presenter presenter;
@Mock
private GuidedDecisionTable52 model;
@Mock
private TranslationService translationService;
@Mock
private BRLConditionColumn editingCol;
@InjectMocks
private BRLConditionColumnPlugin plugin = spy(new BRLConditionColumnPlugin(ruleModellerPage,
mock(DefaultValuesPage.class),
additionalInfoPage,
changeEvent,
translationService) {
@Override
public RuleModel getRuleModel() {
final RuleModel ruleModel = new RuleModel();
ruleModel.lhs = new IPattern[1];
final FactPattern factPattern = new FactPattern();
final SingleFieldConstraint constraint = new SingleFieldConstraint();
constraint.setValue("var1");
constraint.setFieldType("fieldType");
constraint.setFactType("factType");
constraint.setFieldName("fieldName");
constraint.setOperator("in");
constraint.setConstraintValueType(BaseSingleFieldConstraint.TYPE_TEMPLATE);
factPattern.addConstraint(constraint);
ruleModel.lhs[0] = factPattern;
return ruleModel;
}
});
@Before
public void setup() {
doReturn(presenter).when(plugin).getPresenter();
doReturn(model).when(presenter).getModel();
}
@Test
public void getDefinedVariables() {
doReturn(true).when(plugin).isNewColumn();
doReturn(model).when(presenter).getModel();
doReturn("header").when(editingCol).getHeader();
plugin.generateColumn();
verify(editingCol).setChildColumns(listArgumentCaptor.capture());
        final List<BRLConditionVariableColumn> value = listArgumentCaptor.getValue();
assertEquals(1, value.size());
final BRLConditionVariableColumn brlConditionVariableColumn = value.get(0);
assertEquals("fieldType", brlConditionVariableColumn.getFieldType());
assertEquals("var1", brlConditionVariableColumn.getVarName());
assertEquals("fieldName", brlConditionVariableColumn.getFactField());
assertEquals("in", brlConditionVariableColumn.getOperator());
assertNull(brlConditionVariableColumn.getDefaultValue());
}
@Test
public void getDefinedVariables2() {
doReturn(true).when(plugin).isNewColumn();
doReturn(model).when(presenter).getModel();
doReturn("header").when(editingCol).getHeader();
        final ArrayList<BRLConditionVariableColumn> childColumns = new ArrayList<>();
final BRLConditionVariableColumn brlConditionVariableColumn1 = new BRLConditionVariableColumn("var1",
"fieldType");
final DTCellValue52 defaultValue = new DTCellValue52();
brlConditionVariableColumn1.setDefaultValue(defaultValue);
childColumns.add(brlConditionVariableColumn1);
childColumns.add(new BRLConditionVariableColumn("var2",
"fieldType"));
doReturn(childColumns).when(editingCol).getChildColumns();
plugin.generateColumn();
verify(editingCol).setChildColumns(listArgumentCaptor.capture());
        final List<BRLConditionVariableColumn> value = listArgumentCaptor.getValue();
assertEquals(1, value.size());
final BRLConditionVariableColumn brlConditionVariableColumn = value.get(0);
assertEquals(defaultValue, brlConditionVariableColumn.getDefaultValue());
}
@Test
public void cloneBRLConditionColumn() {
final BRLConditionColumn original = new BRLConditionColumn();
original.setHeader("header");
original.setOperator("==");
final BRLConditionColumn clone = plugin.clone(original);
assertEquals("header", clone.getHeader());
assertEquals("==", clone.getOperator());
}
@Test
public void cloneVariable() {
final BRLConditionVariableColumn original = new BRLConditionVariableColumn();
original.setHeader("header");
original.setOperator("==");
final BRLConditionVariableColumn clone = plugin.cloneVariable(original);
assertEquals("header", clone.getHeader());
assertEquals("==", clone.getOperator());
}
} | java | 19 | 0.705261 | 240 | 43.144444 | 180 | starcoderdata |
using System.Threading.Tasks;
namespace ActIt
{
public interface IOpenPlotListenerAsyncHandler<in TEvent>
{
Task Handle(TEvent theEvent, SceneActor actor);
}
} | c# | 8 | 0.717949 | 61 | 18.6 | 10 | starcoderdata |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Brain
{
static class Program
{
///
/// The main entry point for the application.
///
[STAThread]
static void Main()
{
Application.SetHighDpiMode(HighDpiMode.SystemAware);
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
Form1 f = new Form1
{
Text = "Brain"
};
Application.Run(f);
}
public static void WaitSec(int secs)
{
// code found at: https://www.reddit.com/r/csharp/comments/bmxb0e/less_harsh_alternative_to_threadsleep/
// this was the best alternative to Thread.Sleep() method I could find
DateTime Tthen = DateTime.Now;
do
{
Application.DoEvents();
} while (Tthen.AddSeconds(secs) > DateTime.Now);
}
}
} | c# | 15 | 0.572581 | 116 | 28.394737 | 38 | starcoderdata |
const express = require("express");
const router = express.Router();
const fs = require("fs");
const util = require("util");
const readFileSync = util.promisify(fs.readFile);
const writeFileSync = util.promisify(fs.writeFile);
router.get("/api/notes", async (req, res) => {
let countData = await readFileSync("./db/db.json");
countData = JSON.parse(countData);
res.send(countData);
});
router.post("/api/notes", async (req, res) => {
let notesData = await readFileSync("./db/db.json");
notesData = JSON.parse(notesData);
req.body.id = notesData.length + 1;
const { id, title, text } = req.body;
notesData.push({ id, title, text });
await writeFileSync("./db/db.json", JSON.stringify(notesData, null, 2));
res.json({ msg: "Notes added!" });
});
router.delete("/api/notes/:id", async (req, res) => {
let notesData = await readFileSync("./db/db.json", "utf8");
notesData = JSON.parse(notesData);
const notesId = req.params.id;
notesData.forEach((value, index) => {
if (value.id == notesId) {
notesData.splice(index, 1);
}
});
await writeFileSync("./db/db.json", JSON.stringify(notesData, null, 2));
res.send("Deleted notes!");
});
module.exports = router; | javascript | 17 | 0.661847 | 74 | 30.923077 | 39 | starcoderdata |
package com.github.catalin.cretu.verspaetung.jpa;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.time.LocalTime;
import java.util.List;
import java.util.Optional;
@Repository
public interface VehicleJpaRepository extends JpaRepository<VehicleEntity, Long> {
    Optional<VehicleEntity> findByLineName(final String lineName);
boolean existsByLineStopTimesStopId(final Long stopId);
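    /** Finds the vehicles next due at the given stop, ordered by scheduled stop time and line delay. */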
@Query("select" +
" vehicle.id as vehicleId," +
" line.id as lineId," +
" line.name as lineName," +
" stopTimes.time as time," +
" lineDelay.delay as delay " +
"from VehicleEntity vehicle " +
"join vehicle.line line " +
"join line.delay lineDelay " +
"join line.stopTimes stopTimes " +
"join stopTimes.stop stop " +
"where " +
" stop.id = :stopId " +
"order by " +
" stopTimes.time," +
" lineDelay.delay")
List findNextAtStop(@Param("stopId") final Long stopId);
@Query("select vehicle " +
"from VehicleEntity vehicle " +
"join vehicle.line line " +
"join line.stopTimes stopTimes " +
"join stopTimes.stop stop " +
"where " +
" stopTimes.time = :stopTime " +
"and stop.xCoordinate = :stopX " +
"and stop.yCoordinate = :stopY")
    List<VehicleEntity> findByStop(
@Param("stopTime") final LocalTime stopTime,
@Param("stopX") final Integer stopX,
@Param("stopY") final Integer stopY);
} | java | 23 | 0.615714 | 83 | 35.705882 | 51 | starcoderdata |
<?php
class dmProject
{
protected static
$key,
$hash,
$models,
$allModels,
$dmModels;
/**
   * Returns the project key based on its dir_name
*/
public static function getKey()
{
if (null === self::$key)
{
self::$key = basename(sfConfig::get('sf_root_dir'));
}
return self::$key;
}
/**
   * Returns the project hash based on its root dir
*/
public static function getHash()
{
if (null === self::$hash)
{
self::$hash = substr(md5(sfConfig::get('sf_root_dir')), -8);
}
return self::$hash;
}
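  /**
   * Returns the project's model names, excluding dmCorePlugin and dmUserPlugin base models
   */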
public static function getModels()
{
if (null === self::$models)
{
$libDir = dmOs::normalize(sfConfig::get('sf_lib_dir'));
$baseFiles = array_merge(
glob($libDir.'/model/doctrine/base/Base*.class.php'),
glob($libDir.'/model/doctrine/*Plugin/base/Base*.class.php')
);
$dmCoreDir = dmOs::join($libDir, 'model/doctrine/dmCorePlugin/base/');
$dmUserDir = dmOs::join($libDir, 'model/doctrine/dmUserPlugin/base/');
foreach($baseFiles as $index => $file)
{
if(0 === strpos($file, $dmCoreDir) || 0 === strpos($file, $dmUserDir))
{
unset($baseFiles[$index]);
}
}
self::$models = self::getModelsFromBaseFiles($baseFiles);
}
return self::$models;
}
public static function getAllModels()
{
if (null === self::$allModels)
{
$baseFiles = array_merge(
glob(sfConfig::get('sf_lib_dir').'/model/doctrine/base/Base*.class.php'),
glob(sfConfig::get('sf_lib_dir').'/model/doctrine/*Plugin/base/Base*.class.php')
);
self::$allModels = self::getModelsFromBaseFiles($baseFiles);
}
return self::$allModels;
}
public static function getDmModels()
{
if (null === self::$dmModels)
{
$baseFiles = glob(dmOs::join(sfConfig::get('sf_lib_dir'), 'model/doctrine/dmCorePlugin/base/Base*.class.php'));
self::$dmModels = self::getModelsFromBaseFiles($baseFiles);
}
return self::$dmModels;
}
protected static function getModelsFromBaseFiles(array $files)
{
$models = array();
foreach($files as $file)
{
$models[] = preg_replace('|^Base(\w+).class.php$|', '$1', basename($file));
}
return $models;
}
public static function getRootDir()
{
return dmOs::normalize(sfConfig::get('sf_root_dir'));
}
public static function getNormalizedRootDir()
{
return dmOs::normalize(self::getRootDir());
}
/**
* remove sfConfig::get('sf_root_dir') from path
*/
public static function unRootify($path)
{
if (self::isInProject($path))
{
$path = substr($path, strlen(self::getRootDir()));
}
return trim($path, '/');
}
/**
* add sfConfig::get('sf_root_dir') to path
*/
public static function rootify($path)
{
if (!self::isInProject($path))
{
$path = dmOs::join(self::getRootDir(), $path);
}
else
{
$path = dmOs::join($path);
}
return $path;
}
public static function isInProject($path)
{
return strpos(dmOs::normalize($path), self::getRootDir()) === 0;
}
public static function appExists($application)
{
return file_exists(self::rootify('apps/'.$application.'/config/'.$application.'Configuration.class.php'));
}
} | php | 20 | 0.587455 | 117 | 20.679739 | 153 | starcoderdata |
string SqkWznmWrsrv::getSquawkEngbase(
DbsWznm* dbswznm
, DpchInvWznmWrsrvEngbase* dpchinv
) {
// example: "write main engine C++ code basics for version 'BeamRelay 0.1'"
return("write main engine C++ code basics for version '" + StubWznm::getStubVerStd(dbswznm, dpchinv->refWznmMVersion) + "'"); // IP getSquawkEngbase --- RLINE
} | c++ | 10 | 0.72434 | 159 | 47.857143 | 7 | inline |
using Newtonsoft.Json;
namespace Kooboo.Sites.Payment.Methods.Adyen.Lib
{
public class AdyenPaymentRequest
{
[JsonProperty("reference")]
public string Reference { get; set; }
[JsonProperty("amount")]
public AdyenAmount Amount { get; set; }
[JsonProperty("countryCode")]
public string CountryCode { get; set; }
[JsonProperty("merchantAccount")]
public string MerchantAccount { get; set; }
[JsonProperty("returnUrl")]
public string ReturnUrl { get; set; }
[JsonProperty("description")]
public string Description { get; set; }
}
} | c# | 11 | 0.618974 | 51 | 24.72 | 25 | starcoderdata |
/**************************************************************************\
* This file is part of CaSPER. *
* *
* Copyright: *
* 2011-2011 - *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* http://www.apache.org/licenses/LICENSE-2.0 *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, *
* either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
\*************************************************************************/
#ifndef CASPER_KERNEL_OBJ_BNDEXPR_H_
#define CASPER_KERNEL_OBJ_BNDEXPR_H_
#include
namespace Casper {
namespace Detail {
template<class T,class Eval>
struct Create >
{
CP::BndExpr operator()(CP::Store& store, const T& t);
};
template<class T,class Eval>
CP::BndExpr Create >::operator()(CP::Store& store, const T& t)
{ return CP::BndExpr }
//template<class Eval,class T>
//CP::BndExpr ExprWrapper store) const
//{ return Create >()(store,t); }
//
//template<class T>
//CP::BndExpr ExprWrapper store) const
//{ return Create >()(store,t); }
} // Detail
namespace CP {
template<class Eval>
struct BndView > : BndExpr
{
BndView(Store& store, const Expr e) :
BndExpr {}
const Expr getObj() const { return expr; }
Expr expr;
};
// element among Expr >
template<class Eval>
struct BndViewRel2 >,Expr :
BndView >
{
CASPER_ASSERT_BNDVIEW_EVAL(Eval)
typedef BndView > Super;
typedef Var<Eval,typename Traits::GetDefaultDom Elem;
static Super getSuper(Store& s,const Expr >& p1,const Expr p2)
{
if (ValView >(s,p2).ground())
{
int idx = ValView >(s,p2).value();
ElementView > > elem(p1);
return Super(s,elem.get(idx));
}
Elem v(s,Detail::VarDomCreator<typename Elem::Dom>().unionOf(s,p1));
s.post(elementEqual(p1,p2,v));
return Super(s,v);
}
Rel2 >,Expr > getObj() const
{ return Rel2 >,Expr >(v1,v2); }
BndViewRel2(Store& store,const Expr >& p1, const Expr p2) :
Super(getSuper(store,p1,p2)),
v1(p1),v2(p2)
{}
Expr > v1;
Expr v2;
};
} // CP
}
#endif /* CASPER_KERNEL_OBJ_BNDEXPR_H_ */ | c | 15 | 0.552741 | 88 | 32.89899 | 99 | starcoderdata |
'use strict';
/* jshint ignore:start */
/**
* This code was generated by
* \ / _ _ _| _ _
* | (_)\/(_)(_|\/| |(/_ v1.0.0
* / /
*/
/* jshint ignore:end */
var Q = require('q'); /* jshint ignore:line */
var _ = require('lodash'); /* jshint ignore:line */
var Page = require('../../../../base/Page'); /* jshint ignore:line */
var deserialize = require(
'../../../../base/deserialize'); /* jshint ignore:line */
var values = require('../../../../base/values'); /* jshint ignore:line */
var TokenList;
var TokenPage;
var TokenInstance;
/* jshint ignore:start */
/**
* @constructor Twilio.Api.V2010.AccountContext.TokenList
* @description Initialize the TokenList
*
* @param {Twilio.Api.V2010} version - Version of the resource
* @param {string} accountSid - The unique sid that identifies this account
*/
/* jshint ignore:end */
TokenList = function TokenList(version, accountSid) {
/* jshint ignore:start */
/**
* @function tokens
* @memberof Twilio.Api.V2010.AccountContext
* @instance
*
* @param {string} sid - sid of instance
*
* @returns {Twilio.Api.V2010.AccountContext.TokenContext}
*/
/* jshint ignore:end */
function TokenListInstance(sid) {
return TokenListInstance.get(sid);
}
TokenListInstance._version = version;
// Path Solution
TokenListInstance._solution = {accountSid: accountSid};
TokenListInstance._uri = _.template(
'/Accounts/<%= accountSid %>/Tokens.json' // jshint ignore:line
)(TokenListInstance._solution);
/* jshint ignore:start */
/**
* create a TokenInstance
*
* @function create
* @memberof Twilio.Api.V2010.AccountContext.TokenList
* @instance
*
* @param {object|function} opts - ...
* @param {string} opts.accountSid - The account_sid
* @param {number} [opts.ttl] - The duration in seconds the credentials are valid
* @param {function} [callback] - Callback to handle processed record
*
* @returns {Promise} Resolves to processed TokenInstance
*/
/* jshint ignore:end */
TokenListInstance.create = function create(opts, callback) {
if (_.isFunction(opts)) {
callback = opts;
opts = {};
}
opts = opts || {};
var deferred = Q.defer();
var data = values.of({'Ttl': _.get(opts, 'ttl')});
var promise = this._version.create({uri: this._uri, method: 'POST', data: data});
promise = promise.then(function(payload) {
deferred.resolve(new TokenInstance(this._version, payload));
}.bind(this));
promise.catch(function(error) {
deferred.reject(error);
});
if (_.isFunction(callback)) {
deferred.promise.nodeify(callback);
}
return deferred.promise;
};
return TokenListInstance;
};
/* jshint ignore:start */
/**
* @constructor Twilio.Api.V2010.AccountContext.TokenPage
* @augments Page
* @description Initialize the TokenPage
*
* @param {Twilio.Api.V2010} version - Version of the resource
* @param {object} response - Response from the API
* @param {object} solution - Path solution
*
* @returns TokenPage
*/
/* jshint ignore:end */
TokenPage = function TokenPage(version, response, solution) {
// Path Solution
this._solution = solution;
Page.prototype.constructor.call(this, version, response, this._solution);
};
_.extend(TokenPage.prototype, Page.prototype);
TokenPage.prototype.constructor = TokenPage;
/* jshint ignore:start */
/**
* Build an instance of TokenInstance
*
* @function getInstance
* @memberof Twilio.Api.V2010.AccountContext.TokenPage
* @instance
*
* @param {object} payload - Payload response from the API
*
* @returns TokenInstance
*/
/* jshint ignore:end */
TokenPage.prototype.getInstance = function getInstance(payload) {
return new TokenInstance(this._version, payload, this._solution.accountSid);
};
/* jshint ignore:start */
/**
* @constructor Twilio.Api.V2010.AccountContext.TokenInstance
* @description Initialize the TokenContext
*
* @property {string} accountSid - The unique sid that identifies this account
* @property {Date} dateCreated - The date this resource was created
* @property {Date} dateUpdated - The date this resource was last updated
* @property {string} iceServers - An array representing the ephemeral credentials
 * @property {string} password - The temporary password used for authenticating
* @property {string} ttl - The duration in seconds the credentials are valid
* @property {string} username -
* The temporary username that uniquely identifies a Token.
*
* @param {Twilio.Api.V2010} version - Version of the resource
* @param {object} payload - The instance payload
*/
/* jshint ignore:end */
TokenInstance = function TokenInstance(version, payload, accountSid) {
this._version = version;
// Marshaled Properties
this.accountSid = payload.account_sid; // jshint ignore:line
this.dateCreated = deserialize.rfc2822DateTime(payload.date_created); // jshint ignore:line
this.dateUpdated = deserialize.rfc2822DateTime(payload.date_updated); // jshint ignore:line
this.iceServers = payload.ice_servers; // jshint ignore:line
  this.password = payload.password; // jshint ignore:line
this.ttl = payload.ttl; // jshint ignore:line
this.username = payload.username; // jshint ignore:line
// Context
this._context = undefined;
this._solution = {accountSid: accountSid, };
};
module.exports = {
TokenList: TokenList,
TokenPage: TokenPage,
TokenInstance: TokenInstance
}; | javascript | 23 | 0.680816 | 93 | 29.066298 | 181 | starcoderdata |
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Brexis\LaravelWorkflow\Traits\WorkflowTrait;
class Captura extends Model
{
use WorkflowTrait;
protected $fillable = [
"workflow_state",
"id_orden",
"id_requerimiento",
"id_expediente",
"id_lugar",
"id_funcionario",
"descripcion_captura",
"observaciones",
"fecha_captura"
];
public function capturable(){
return $this->morphTo();
}
    // this is related to the PJ order
public function orden_captura()
{
return $this->hasOne(Orden::class, 'id_orden');
}
    // this is related to the MP requirement
public function requerimiento()
{
return $this->hasOne(Requerimiento::class, 'id_requerimiento');
}
public function expediente()
{
return $this->hasOne(Expediente::class, 'id_expediente');
}
public function detenidos()
{
return $this->hasMany(Detenido::class, 'id_captura');
}
public function LugarSS()
{
return $this->belongsTo(LugarSS::class, 'id_lugar');
}
public function funcionarios() {
return $this->belongsTo(FuncionarioSS::class, 'id_funcionario');
}
public function evidencias(){
return $this->hasMany(Evidencia::class, 'id_captura');
}
} | php | 10 | 0.647745 | 71 | 20.081967 | 61 | starcoderdata |
<?php
namespace App\Http\Controllers\Site;
use App\User;
use Illuminate\Http\Request;
use App\Http\Requests;
use App\Http\Controllers\Controller;
use Illuminate\Support\Facades\App;
use Illuminate\Support\Facades\URL;
class SitemapController extends Controller
{
public function getSitemap() {
$sitemap = App::make("sitemap");
$sitemap->setCache('sitemap', 3600);
if (!$sitemap->isCached()) {
$sitemap->add(URL::to('p/about'), time(), '0.9', 'monthly');
$sitemap->add(URL::to('artisans'), time(), '1.0', 'monthly');
$sitemap->add(URL::to('sponsor'), time(), '0.9', 'monthly');
$users = User::all();
foreach ($users as $user) {
$sitemap->add($user->username, $user->created_at, '0.9', 'monthly', $user->avatar);
}
}
return $sitemap->render('xml');
}
} | php | 15 | 0.592865 | 99 | 27.029412 | 34 | starcoderdata |
// Copyright 2012 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using System;
using NodaTime.TimeZones;
using NodaTime.Utility;
using JetBrains.Annotations;
namespace NodaTime
{
///
/// Static access to date/time zone providers built into Noda Time and for global configuration where this is unavoidable.
/// All properties are thread-safe, and the providers returned by the read-only properties cache their results.
///
public static class DateTimeZoneProviders
{
///
/// Gets a time zone provider which uses a <see cref="TzdbDateTimeZoneSource"/>.
/// The underlying source is <see cref="TzdbDateTimeZoneSource.Default"/>, which is initialized from
/// resources within the NodaTime assembly.
///
/// time zone provider using a
[NotNull] public static IDateTimeZoneProvider Tzdb => TzdbHolder.TzdbImpl;
// This class exists to force TZDB initialization to be lazy. We don't want using
// DateTimeZoneProviders.Bcl to force a read/parse of TZDB data.
private static class TzdbHolder
{
// See http://csharpindepth.com/Articles/General/BeforeFieldInit.aspx
static TzdbHolder() {}
internal static readonly DateTimeZoneCache TzdbImpl = new DateTimeZoneCache(TzdbDateTimeZoneSource.Default);
}
#if !NETSTANDARD1_3
// As per TzDbHolder above, this exists to defer construction of a BCL provider until needed.
// While BclDateTimeZoneSource itself is lightweight, DateTimeZoneCache still does a non-trivial amount of work
// on initialisation.
private static class BclHolder
{
static BclHolder() {}
internal static readonly DateTimeZoneCache BclImpl = new DateTimeZoneCache(new BclDateTimeZoneSource());
}
///
/// Gets a time zone provider which uses a <see cref="BclDateTimeZoneSource"/>.
/// This property is not available on the .NET Standard 1.3 build of Noda Time.
///
///
/// note on <see cref="BclDateTimeZone"/> for details of some incompatibilities with the BCL.
///
/// time zone provider which uses a
[NotNull] public static IDateTimeZoneProvider Bcl => BclHolder.BclImpl;
#endif
private static readonly object SerializationProviderLock = new object();
private static IDateTimeZoneProvider serializationProvider;
///
/// Gets the <see cref="IDateTimeZoneProvider"/> to use to interpret a time zone ID read as part of
/// XML or binary serialization.
///
///
/// This property defaults to <see cref="DateTimeZoneProviders.Tzdb"/>. The mere existence of
/// this property is unfortunate, but XML and binary serialization in .NET provide no simple way of configuring
/// appropriate context. It is expected that any single application is unlikely to want to serialize
/// values using different time zone providers.
///
/// to use to interpret a time zone ID read as part of
/// XML or binary serialization.
[NotNull] public static IDateTimeZoneProvider Serialization
{
get
{
lock (SerializationProviderLock)
{
return serializationProvider ?? (serializationProvider = Tzdb);
}
}
set
{
lock (SerializationProviderLock)
{
serializationProvider = Preconditions.CheckNotNull(value, nameof(value));
}
}
}
}
} | c# | 19 | 0.64494 | 126 | 44.344444 | 90 | starcoderdata |
// Copyright (c) 2019 Cisco and/or its affiliates.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commands
import (
"bytes"
"context"
"fmt"
"io/ioutil"
"path"
"reflect"
"strings"
"time"
"github.com/golang/protobuf/jsonpb"
"github.com/golang/protobuf/proto"
"github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"go.ligato.io/cn-infra/v2/logging"
"go.ligato.io/cn-infra/v2/servicelabel"
agentcli "go.ligato.io/vpp-agent/v3/cmd/agentctl/cli"
"go.ligato.io/vpp-agent/v3/pkg/models"
)
func NewImportCommand(cli agentcli.Cli) *cobra.Command {
var (
opts ImportOptions
timeout uint
)
cmd := &cobra.Command{
Use: "import file",
Args: cobra.ExactArgs(1),
Short: "Import config data from file",
Example: `
To import file contents into Etcd, run:
$ cat input.txt
config/vpp/v2/interfaces/loop1 {"name":"loop1","type":"SOFTWARE_LOOPBACK"}
config/vpp/l2/v2/bridge-domain/bd1 {"name":"bd1"}
$ {{.CommandPath}} input.txt
To import it via gRPC, include --grpc flag:
$ {{.CommandPath}} --grpc=localhost:9111 input.txt
FILE FORMAT
The import file must contain a single key-value pair per line:
...
Empty lines and lines starting with '#' are ignored.
KEY FORMAT
Keys can be defined in two ways:
- full: /vnf-agent/vpp1/config/vpp/v2/interfaces/iface1
- short: config/vpp/v2/interfaces/iface1
For short keys, the import command uses microservice label defined with --service-label.`,
RunE: func(cmd *cobra.Command, args []string) error {
opts.InputFile = args[0]
opts.Timeout = time.Second * time.Duration(timeout)
return RunImport(cli, opts)
},
}
flags := cmd.Flags()
flags.UintVar(&opts.TxOps, "txops", 128, "Number of ops per transaction")
flags.UintVarP(&timeout, "time", "t", 30, "Timeout (in seconds) to wait for server response")
flags.BoolVar(&opts.ViaGrpc, "grpc", false, "Enable to import config via gRPC")
return cmd
}
type ImportOptions struct {
InputFile string
TxOps uint
Timeout time.Duration
ViaGrpc bool
}
func RunImport(cli agentcli.Cli, opts ImportOptions) error {
keyVals, err := parseImportFile(opts.InputFile)
if err != nil {
return fmt.Errorf("parsing import data failed: %v", err)
}
if opts.ViaGrpc {
// Set up a connection to the server.
c, err := cli.Client().ConfigClient()
if err != nil {
return err
}
fmt.Printf("importing %d key vals\n", len(keyVals))
req := c.ChangeRequest()
for _, keyVal := range keyVals {
fmt.Printf(" - %s\n", keyVal.Key)
req.Update(keyVal.Val)
}
fmt.Printf("sending via gRPC\n")
ctx, cancel := context.WithTimeout(context.Background(), opts.Timeout)
defer cancel()
if err := req.Send(ctx); err != nil {
return fmt.Errorf("send failed: %v", err)
}
} else {
c, err := cli.Client().KVDBClient()
if err != nil {
return fmt.Errorf("KVDB error: %v", err)
}
db := c.ProtoBroker()
fmt.Printf("importing %d key vals\n", len(keyVals))
var txn = db.NewTxn()
ops := 0
for i := 0; i < len(keyVals); i++ {
keyVal := keyVals[i]
key, err := c.CompleteFullKey(keyVal.Key)
if err != nil {
return fmt.Errorf("key processing failed: %v", err)
}
fmt.Printf(" - %s\n", key)
txn.Put(key, keyVal.Val)
ops++
if ops == int(opts.TxOps) || i+1 == len(keyVals) {
				fmt.Printf("committing tx with %d ops\n", ops)
ctx, cancel := context.WithTimeout(context.Background(), opts.Timeout)
err = txn.Commit(ctx)
cancel()
if err != nil {
return fmt.Errorf("commit failed: %v", err)
}
ops = 0
txn = db.NewTxn()
}
}
}
logging.Debug("OK")
return nil
}
type keyVal struct {
Key string
Val proto.Message
}
func parseImportFile(importFile string) (keyVals []keyVal, err error) {
b, err := ioutil.ReadFile(importFile)
if err != nil {
return nil, fmt.Errorf("reading input file failed: %v", err)
}
// parse lines
lines := bytes.Split(b, []byte("\n"))
for _, l := range lines {
line := bytes.TrimSpace(l)
if bytes.HasPrefix(line, []byte("#")) {
continue
}
parts := bytes.SplitN(line, []byte(" "), 2)
if len(parts) < 2 {
continue
}
key := string(parts[0])
data := string(parts[1])
if key == "" || data == "" {
continue
}
logrus.Debugf("parse line: %s %s\n", key, data)
//key = completeFullKey(key)
val, err := unmarshalKeyVal(key, data)
if err != nil {
return nil, fmt.Errorf("decoding value failed: %v", err)
}
logrus.Debugf("KEY: %s - %v\n", key, val)
keyVals = append(keyVals, keyVal{key, val})
}
return
}
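// unmarshalKeyVal resolves the proto message type registered for the given key
// and decodes the JSON-encoded value into a new instance of that message.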
func unmarshalKeyVal(fullKey string, data string) (proto.Message, error) {
key := stripAgentPrefix(fullKey)
model, err := models.GetModelForKey(key)
if err != nil {
return nil, err
}
valueType := proto.MessageType(model.ProtoName())
if valueType == nil {
return nil, fmt.Errorf("unknown proto message defined for key %s", key)
}
value := reflect.New(valueType.Elem()).Interface().(proto.Message)
if err := jsonpb.UnmarshalString(data, value); err != nil {
return nil, err
}
return value, nil
}
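// stripAgentPrefix removes the per-agent prefix (e.g. "/vnf-agent/<label>/") from a
// full key, returning the short config key used for model lookup.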
func stripAgentPrefix(key string) string {
if !strings.HasPrefix(key, servicelabel.GetAllAgentsPrefix()) {
return key
}
keyParts := strings.Split(key, "/")
if len(keyParts) < 4 || keyParts[0] != "" {
return path.Join(keyParts[2:]...)
}
return path.Join(keyParts[3:]...)
} | go | 17 | 0.6572 | 94 | 23.590909 | 242 | starcoderdata |
public class _0680ValidPalindromeII {
public boolean validPalindrome(String s) {
int length = s.length();
if (length <= 2) {
return true;
}
int size = length % 2 == 0 ? length / 2 : length / 2 + 1;
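        // Track three candidate states while matching characters from both ends:
        //   mid   - no character skipped so far
        //   left  - exactly one character skipped on the left half
        //   right - exactly one character skipped on the right half
        // The string is a valid palindrome if any state survives to the middle.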
boolean mid = false, left = false, right = false;
for (int i = 0; i < size; i++) {
if (i > 0 && i == size - 1) {
return true;
}
boolean tmpMid = s.charAt(i) == s.charAt(length - i - 1);
boolean tmpLeft = s.charAt(i + 1) == s.charAt(length - i - 1);
boolean tmpRight = s.charAt(i) == s.charAt(length - i - 2);
if (i > 0) {
tmpMid = tmpMid && mid;
tmpLeft = tmpLeft && (left || mid);
tmpRight = tmpRight && (right || mid);
}
mid = tmpMid;
left = tmpLeft;
right = tmpRight;
if (!(mid || left || right)) {
return false;
}
}
return true;
}
} | java | 14 | 0.479424 | 70 | 30.354839 | 31 | starcoderdata |
#ifndef MISSION_DEVICES_DEVICEPACKETS_THERMALSENSORPACKET_H_
#define MISSION_DEVICES_DEVICEPACKETS_THERMALSENSORPACKET_H_
#include
#include
#include
#include
namespace TSensorDefinitions {
enum ObjIds: object_id_t {
TEST_HKB_HANDLER = objects::SPI_Test_PT1000,
SYRLINKS_HANDLER = objects::PT1000_Syrlinks_DEC1_O1,
MGT_1_HANDLER = objects::PT1000_MGT1_DEC2,
PLOC_HANDLER = objects::PT1000_PLOC_DEC4,
MESHCAM_HANDLER = objects::PT1000_Camera_DEC1_O2
};
enum PoolIds: lp_id_t {
TEMPERATURE_C,
FAULT_BYTE
};
static constexpr DeviceCommandId_t CONFIG_CMD = 0x80;
static constexpr DeviceCommandId_t REQUEST_CONFIG = 0x00;
static constexpr DeviceCommandId_t REQUEST_RTD = 0x01;
static constexpr DeviceCommandId_t REQUEST_FAULT_BYTE = 0x07;
static constexpr uint32_t THERMAL_SENSOR_SET_ID = REQUEST_RTD;
class ThermalSensorDataset:
public StaticLocalDataSet<sizeof(float) + sizeof(uint8_t)> {
public:
/**
* Constructor used by owner and data creators like device handlers.
* @param owner
* @param setId
*/
ThermalSensorDataset(HasLocalDataPoolIF* owner):
StaticLocalDataSet(owner, THERMAL_SENSOR_SET_ID) {
}
/**
* Constructor used by data users like controllers.
* @param sid
*/
ThermalSensorDataset(object_id_t objectId):
StaticLocalDataSet(sid_t(objectId, THERMAL_SENSOR_SET_ID)) {
}
    lp_var_t<float> temperatureCelcius = lp_var_t<float>(sid.objectId,
            PoolIds::TEMPERATURE_C, this);
    lp_var_t<uint8_t> errorByte = lp_var_t<uint8_t>(sid.objectId,
            PoolIds::FAULT_BYTE, this);
};
}
#endif /* MISSION_DEVICES_DEVICEPACKETS_THERMALSENSORPACKET_H_ */ | c | 12 | 0.717615 | 72 | 29.75 | 60 | starcoderdata |
using System;
using Calculator.Interfaces;
using FluentAssertions;
using NSubstitute;
using Selkie.AutoMocking;
namespace Calculator.Tests
{
[AutoDataTestClass]
public class CalculatorTests
{
[AutoDataTestMethod]
public void Add_ForNumbers_Adds(Calculator sut,
[Freeze] IAdd add)
{
add.Execute(1,
2)
.Returns(3);
sut.Add(1,
2)
.Should()
.Be(3);
}
[AutoDataTestMethod]
        public void Create_ForAddIsNull_Throws(Lazy<Calculator> sut,
[BeNull] IAdd add)
{
// ReSharper disable once UnusedVariable
Action action = () =>
{
var actual = sut.Value;
};
            action.Should()
                  .Throw<ArgumentNullException>()
                  .WithParameter("add");
}
[AutoDataTestMethod]
        public void Create_ForSubtractIsNull_Throws(Lazy<Calculator> sut,
[BeNull] ISubtract subtract)
{
// ReSharper disable once UnusedVariable
Action action = () =>
{
var actual = sut.Value;
};
            action.Should()
                  .Throw<ArgumentNullException>()
                  .WithParameter("subtract");
}
[AutoDataTestMethod]
public void Subtract_ForNumbers_Subtracts(Calculator sut,
[Freeze] ISubtract subtract)
{
subtract?.Execute(1,
2)
.Returns(-1);
sut.Subtract(1,
2)
.Should()
.Be(-1);
}
}
} | c# | 18 | 0.419261 | 80 | 27.971831 | 71 | starcoderdata |
<?php
require_once 'src/Palindrome.php';
class PalindromeTest extends PHPUnit_Framework_TestCase
{
function test_palindrome_checker_word()
{
$test_Palindrome = new Palindrome;
$input = "racecar";
$result = $test_Palindrome->checkPalindrome($input);
$this->assertEquals("True", $result);
}
function test_palindrome_checker_sentence()
{
$test_Palindrome = new Palindrome;
$input = "race car";
$result = $test_Palindrome->checkPalindrome_sentence($input);
$this->assertEquals("True", $result);
}
function test_palindrome_checker_sentencewords_none()
{
$test_Palindrome = new Palindrome;
$input = "this cat";
$result = $test_Palindrome->checkPalindrome_words_in_sentence($input);
$this->assertEquals("None of these are palindromes", $result);
}
function test_palindrome_checker_sentencewords_one()
{
$test_Palindrome = new Palindrome;
$input = "this racecar";
$result = $test_Palindrome->checkPalindrome_words_in_sentence($input);
$this->assertEquals("This is a palindrome: racecar", $result);
}
function test_palindrome_checker_sentencewords_some()
{
$test_Palindrome = new Palindrome;
$input = "this racecar hannah";
$result = $test_Palindrome->checkPalindrome_words_in_sentence($input);
$this->assertEquals("These are palindromes: racecar, hannah", $result);
}
function test_palindrome_hardMode()
{
$test_Palindrome = new Palindrome;
$input = "race car";
$result = $test_Palindrome->checkPalindrome_hardMode($input);
$this->assertEquals("This sentence is a palindrome", $result);
}
}
?> | php | 11 | 0.565041 | 83 | 23 | 82 | starcoderdata |
OI::OI() {
// Process operator interface input here.
// BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
xboxController.reset(new Joystick(0));
intakeDownButton.reset(new JoystickButton(xboxController.get(), 2));
intakeDownButton->WhileHeld(new IntakeGroup(false));
intakeUpButton.reset(new JoystickButton(xboxController.get(), 1));
intakeUpButton->WhileHeld(new IntakeGroup(true));
fireButton.reset(new JoystickButton(xboxController.get(), 4));
fireButton->WhileHeld(new Shoot(0));
// SmartDashboard Buttons
SmartDashboard::PutData("Autonomous Command", new AutonomousCommand());
SmartDashboard::PutData("DriveConveyor: Up", new DriveConveyor(true));
SmartDashboard::PutData("DriveConveyor: Down", new DriveConveyor(false));
SmartDashboard::PutData("Shoot: default", new Shoot(0.8));
SmartDashboard::PutData("Shoot: off", new Shoot(0));
SmartDashboard::PutData("RotateTurret: left", new RotateTurret(-1));
SmartDashboard::PutData("RotateTurret: right", new RotateTurret(1));
SmartDashboard::PutData("RotateTurret: stop", new RotateTurret(0));
SmartDashboard::PutData("Drive: Stop", new Drive(0, 0));
SmartDashboard::PutData("DriveIntakeRollers: In", new DriveIntakeRollers(true));
SmartDashboard::PutData("DriveIntakeRollers: Out", new DriveIntakeRollers(false));
SmartDashboard::PutData("IntakeGroup: Up", new IntakeGroup(true));
SmartDashboard::PutData("IntakeGroup: Down", new IntakeGroup(false));
// END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTRUCTORS
} | c++ | 10 | 0.735145 | 86 | 53.586207 | 29 | inline |
function fn(x, total, val) {
var replaced;
let wholeNumber = val.toString();
if (x === true) {
for (var i = 0; i < total; i++) {
wholeNumber++;
}
}
return wholeNumber;
}
global.__optimize && __optimize(fn);
inspect = function() {
return fn(false, 10, 5);
}; | javascript | 9 | 0.559441 | 37 | 16.875 | 16 | starcoderdata |