code
stringlengths 0
29.6k
| language
stringclasses 9
values | AST_depth
int64 3
30
| alphanumeric_fraction
float64 0.2
0.86
| max_line_length
int64 13
399
| avg_line_length
float64 5.02
139
| num_lines
int64 7
299
| source
stringclasses 4
values |
---|---|---|---|---|---|---|---|
def test_get_latest_returns_latest(self):
"""
Make sure we can get the latest time frame.
"""
# _create_three() is a test-class helper; presumably it creates three
# time frames in chronological order (first, mid, last) — confirm there.
first, mid, last = self._create_three()
latest = TimeFrame.get_latest()
# Only the most recent frame should be reported; `first`/`mid` exist
# purely so the query has older rows to skip.
self.assertEqual(latest, last) | python | 7 | 0.578947 | 51 | 23.8 | 10 | inline |
#include <bits/stdc++.h>
#define ll long long
#define pii pair<int,int>
#define pll pair<ll,ll>
#define pli pair<ll,int>
#define fi first
#define se second
#define inf (INT_MAX/2-1)
#define infl (1LL<<60)
#define vi vector<int>
#define pb push_back
#define sz(a) (int)(a).size()
#define all(a) begin(a),end(a)
#define y0 y5656
#define y1 y7878
#define aaa system("pause");
#define dbg(x) cerr<<(#x)<<": "<<(x)<<'\n',aaa
#define dbga(x,n) cerr<<(#x)<<"[]: ";for(int _=0;_<n;_++)cerr<<x[_]<<' ';cerr<<'\n',aaa
#define dbgs(x) cerr<<(#x)<<"[stl]: ";for(int _:x)cerr<<_<<' ';cerr<<'\n',aaa
#define dbgp(x) cerr<<(#x)<<": "<<x.fi<<' '<<x.se<<'\n',aaa
#define maxn 100000
using namespace std;
struct yes { int r[2][2]; int R; }; /// NOTE(review): appears unused in the visible code
ll mod; /// global modulus for all counting (read from stdin in main)
vi g[maxn+5], fii[maxn+5], pref[maxn+5], suff[maxn+5]; /// adjacency, child lists, prefix/suffix products
int wh[maxn+5];/// wh[node] = index of node inside fii[parent[node]]
int tata[maxn+5]; /// tata[node] = parent of node (root's parent is 0)
ll jos[maxn+5], sus[maxn+5];
/// jos[node] = number of ways to form a "blob" starting at node and extending down into its subtree
/// sus[node] = number of ways to form a "blob" starting at node and extending up outside its subtree
/// a := (a + b) mod `mod` (global modulus).
void mad(ll &a, ll b) {
    a = (a + b) % mod;
}
/// a := (a * b) mod `mod`.
void mul(ll &a, ll b) {
    a = a * b % mod;
}
/// Modular sum of all elements of u.
ll msu(vector<ll> u) {
    ll acc = 0;
    for (size_t idx = 0; idx < u.size(); idx++)
        mad(acc, u[idx]);
    return acc;
}
/// Modular product of all elements of u.
ll mpr(vector<ll> u) {
    ll acc = 1;
    for (size_t idx = 0; idx < u.size(); idx++)
        mul(acc, u[idx]);
    return acc;
}
/// Root the tree at `nod` with parent `t`: records parent pointers (tata),
/// ordered child lists (fii), and each child's position in its parent's
/// list (wh), via a depth-first traversal.
void dfsi(int nod, int t) {
    tata[nod] = t;
    for (int child : g[nod]) {
        if (child == t) continue;       // skip the edge back to the parent
        wh[child] = sz(fii[nod]);       // position before appending
        fii[nod].pb(child);
        dfsi(child, nod);
    }
}
void dfsj (int nod) {
for (int nn: fii[nod]) dfsj(nn);
jos[nod] = 1;
for (int nn: fii[nod]) mul(jos[nod], msu({jos[nn], 1}));
}
/// Upward (rerooting) DP, pre-order: sus[node] combines the parent's
/// upward count with the (jos[sibling]+1) products of all siblings,
/// using pref/suff built in main to exclude this child in O(1).
void dfss (int nod) {
// Root has nothing above it: exactly one (empty) upward choice.
if (nod == 1) sus[nod] = 1;
else {
ll cur = sus[tata[nod]];
// Product of (jos[sibling]+1) for siblings before/after us in fii[parent].
if (wh[nod] > 0) mul(cur, pref[tata[nod]][wh[nod]-1]);
if (wh[nod] < sz(fii[tata[nod]])-1) mul(cur, suff[tata[nod]][wh[nod]+1]);
// +1: the option of not extending upward at all.
sus[nod] = msu({cur, 1});
}
// Children read sus[nod], so it must be finalized before recursing.
for (int nn: fii[nod]) dfss(nn);
}
/// Reads a tree of n nodes and a modulus, then for every node prints
/// sus[node] * jos[node] (mod `mod`): the number of "blob" configurations
/// through that node, counted above and below independently.
int main () {
ios::sync_with_stdio(false); cin.tie(0); cout.tie(0);
int n; cin >> n >> mod;
int i, j, z, a, b; // NOTE(review): `z` is declared but never used
// Read the n-1 undirected edges.
for (i = 1; i < n; i++) {
cin >> a >> b;
g[a].pb(b); g[b].pb(a);
}
dfsi(1, 0); // root the tree at node 1
dfsj(1);    // downward counts jos[]
// Prefix/suffix products of (jos[child]+1) per parent, so dfss can
// exclude any single child in O(1).
for (i = 1; i <= n; i++)
if (sz(fii[i]) > 0) {
pref[i].resize(sz(fii[i]));
pref[i][0] = msu({jos[fii[i][0]], 1});
for (j = 1; j < sz(fii[i]); j++)
pref[i][j] = mpr({pref[i][j-1], msu({jos[fii[i][j]], 1})});
suff[i].resize(sz(fii[i]));
suff[i][sz(fii[i])-1] = msu({jos[fii[i][sz(fii[i])-1]], 1});
for (j = sz(fii[i])-2; j >= 0; j--)
suff[i][j] = mpr({suff[i][j+1], msu({jos[fii[i][j]], 1})});
}
dfss(1);    // upward counts sus[]
for (i = 1; i <= n; i++) cout << mpr({sus[i], jos[i]}) << '\n';
return 0;
}
| c++ | 20 | 0.528929 | 89 | 26.618557 | 97 | codenet |
/**
 * Loads every {@code *.png} icon found directly in {@code scanPath} into the
 * {@code icons} map. Symlinked files are registered as aliases sharing the
 * target's {@link ThemeIcon} instance instead of being decoded twice.
 *
 * @param scanPath directory to scan (non-recursive)
 * @param size presumably the pixel size passed to loadIcon — confirm there
 */
public void loadFromPath(Path scanPath, Integer size)
{
// Iterate over the files in the directory
try (DirectoryStream<Path> files = Files.newDirectoryStream(scanPath, "*.png"))
{
for (Path iconFile : files)
{
// Icon name = filename without extension.
String iconName = iconFile.getFileName().toString().split("\\.")[0];
// Assign symlinks as aliases for the real file
if (Files.isSymbolicLink(iconFile))
{
// NOTE(review): readSymbolicLink may return a relative path — verify
// loadIcon resolves it against scanPath.
Path target = Files.readSymbolicLink(iconFile);
ThemeIcon icon = icons.get(target.getFileName().toString().split("\\.")[0]);
// We need to load the target icon first
if (icon == null)
icon = loadIcon(target, size);
icons.put(iconName, icon);
}
else
{
// Regular file: loadIcon presumably registers it in `icons` itself.
loadIcon(iconFile, size);
}
}
}
catch (IOException e)
{
// Best-effort: a missing/unreadable theme folder is logged, not fatal.
log.error("Failed to open DirectoryStream for folder {}", scanPath.toString());
}
} | java | 18 | 0.477233 | 96 | 34.71875 | 32 | inline |
/**
* @typedef { import("../../main.js").Config } Config
*
* @typedef { [number, number] } Point
* @typedef { [Point, Point] } Rectangle
*/
import Grid from "../../utils/grid.js";
import {
regionRect,
pickIndexWithWeights,
distanceToBoundaryMaker,
} from "../../utils/weights.js";
import weightAdjusters from "./weight-adjusters.js";
/**
 * Build the starting weight grid for the "quadrants" allocator.
 * Even-sized boards get the default uniform grid; odd-sized boards start
 * at weight 4 and halve the weight over the middle row/column regions.
 *
 * @param { Config } config
 * @returns { Grid }
 */
function initWeightsQuadrants({ size, margins }) {
  /** @type { Point } */
  const topLeft = [margins, margins];
  /** @type { Point } */
  const bottomRight = [size - margins, size - margins];
  if (size % 2 === 0) {
    return new Grid(topLeft, bottomRight);
  }
  const mid = (size - 1) / 2;
  const halve = (weight) => weight >> 1;
  let grid = new Grid(topLeft, bottomRight, 4);
  grid = grid.applyAt(halve, regionRect([topLeft[0], mid], [bottomRight[0], mid + 1]));
  grid = grid.applyAt(halve, regionRect([mid, topLeft[1]], [mid + 1, bottomRight[1]]));
  return grid;
}
/**
 * Rescale a weight grid by each cell's distance to the grid boundary,
 * producing the "stair" profile.
 *
 * @param { Grid } weights
 * @returns { Grid }
 */
function initWeightsStair(weights) {
  const distFromEdge = distanceToBoundaryMaker(weights.start, weights.end);
  const scale = (weight, idx) => weight * distFromEdge(weights.toVh(idx));
  return weights.apply(scale);
}
/**
 * Core weighted placer: for each allocated rectangle, samples one point
 * according to the current weight grid, then lets the configured weight
 * adjuster reshape the grid before the next pick.
 *
 * @param { Config } config
 * @param { Rectangle[] } allocation
 * @returns { Point[] }
 */
function placeWithWeights(config, allocation) {
const { size, margins, allocatorRect, placerRect, weightAdjuster } = config;
/** @type { Point } */
const start = [margins, margins];
/** @type { Point } */
const end = [size - margins, size - margins];
/** @type { Grid } */
let weights;
// Initial grid depends on the allocator strategy.
if (allocatorRect === "quadrants") {
weights = initWeightsQuadrants(config);
} else {
weights = new Grid(start, end);
}
// Optional extra shaping on top of the allocator's grid.
if (placerRect === "weightsStair") {
weights = initWeightsStair(weights);
}
// Note: `weights` is deliberately mutated across iterations — each pick
// feeds the adjuster that produces the grid for the next pick.
return allocation.reduce((
/** @type { Point[] } */ stones,
[startRect, endRect],
) => {
const subweights = weights.slice(startRect, endRect);
const stn = subweights.toVh(pickIndexWithWeights(subweights.values));
stones.push(stn);
weights = weightAdjusters[weightAdjuster](config, weights, stn);
return stones;
}, []);
}
/**
 * Placer entry point for the "weightsUniform" strategy — delegates to the
 * shared weighted placement (strategy details come from `config`).
 *
 * @param { Config } config
 * @param { Rectangle[] } allocation
 * @returns { Point[] }
 */
export function weightsUniform(config, allocation) {
  const stones = placeWithWeights(config, allocation);
  return stones;
}
/**
 * Placer entry point for the "weightsStair" strategy — delegates to the
 * shared weighted placement (strategy details come from `config`).
 *
 * @param { Config } config
 * @param { Rectangle[] } allocation
 * @returns { Point[] }
 */
export function weightsStair(config, allocation) {
return placeWithWeights(config, allocation);
} | javascript | 16 | 0.617558 | 78 | 21.87931 | 116 | starcoderdata |
// extractorEnv builds the environment for a Kythe extractor subprocess:
// it copies the current process environment, strips KYTHE_INDEX_PACK and
// KYTHE_OUTPUT_FILE, and rewrites KYTHE_OUTPUT_DIRECTORY to an absolute path.
func extractorEnv() ([]string, error) {
var env []string
for _, value := range os.Environ() {
// SplitN with limit 2 keeps '=' characters inside the value intact.
parts := strings.SplitN(value, "=", 2)
// Until kzip support comes along, we only support writing to a single directory so strip these options.
if parts[0] == "KYTHE_INDEX_PACK" || parts[0] == "KYTHE_OUTPUT_FILE" {
continue
} else if parts[0] == "KYTHE_OUTPUT_DIRECTORY" {
// Remap KYTHE_OUTPUT_DIRECTORY to be an absolute path.
output, err := filepath.Abs(parts[1])
if err != nil {
return nil, err
}
env = append(env, "KYTHE_OUTPUT_DIRECTORY="+output)
} else {
// Otherwise, preserve the environment unchanged.
env = append(env, value)
}
}
return env, nil
} | go | 14 | 0.647059 | 106 | 30.727273 | 22 | inline |
// jobDelete removes a job from the scheduler's map, or — if the job has not
// fully stopped — flags it StateDeleting and returns, deferring the actual
// removal (presumably re-invoked once the job stops; confirm in callers).
func (s *Scheduler) jobDelete(job *jobStruct) {
// assumes you already have the job mutex lock
if job.state != StateStopped {
job.state |= StateDeleting
return
}
// Map access is guarded separately from the job's own mutex.
s.jobsRWMutex.Lock()
delete(s.jobs, job.name)
s.jobsRWMutex.Unlock()
} | go | 8 | 0.704918 | 47 | 19.416667 | 12 | inline |
/*
* Copyright (C) 2019-2021 ConnectorIO Sp. z o.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.connectorio.cloud.event.mapping.parser;
import org.connectorio.cloud.event.mapping.BindableDestination;
import org.connectorio.cloud.event.mapping.DestinationParser;
import org.connectorio.cloud.event.mapping.MappingTable;
import org.connectorio.cloud.event.mapping.VariableValue;
import org.connectorio.cloud.event.mapping.parser.node.StaticNode;
import org.connectorio.cloud.event.mapping.parser.node.VariableNode;
import org.connectorio.cloud.event.mapping.standard.BoundDestination;
import org.connectorio.cloud.event.mapping.standard.StandardVariableDefinition;
import org.connectorio.cloud.event.mapping.standard.StandardVariableValue;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Parses slash-separated destination paths against a {@link MappingTable},
 * matching static segments literally and extracting values for
 * {variable} segments.
 *
 * NOTE(review): generic type parameters appear to have been stripped from
 * several declarations when this snippet was extracted (e.g.
 * "List nodes", "Map&lt;BindableDestination, List paths") — as written this
 * will not compile; restore them from the original source.
 */
public class StandardDestinationParser implements DestinationParser {
private final DestinationTree mappings;
public StandardDestinationParser(MappingTable mapping) {
this.mappings = constructTree(mapping);
}
// Splits each destination's path into StaticNode / VariableNode segments
// and registers the node list in the lookup tree.
private DestinationTree constructTree(MappingTable mapping) {
DestinationTree tree = new DestinationTree();
for (BindableDestination destination : mapping.getDestinations()) {
String[] parts = destination.getPath().split("/");
List nodes = new ArrayList<>();
for (String part : parts) {
if (part.contains("{")) {
// "prefix{var}" -> variable name plus the offset of '{' in the segment.
int beginIndex = part.indexOf('{') + 1;
int endIndex = part.indexOf('}');
nodes.add(new VariableNode(part.substring(beginIndex, endIndex), beginIndex - 1));
} else {
nodes.add(new StaticNode(part));
}
}
tree.push(destination, nodes);
}
return tree;
}
@Override
public BindableDestination parse(String name) {
String[] destination = name.split("/");
return mappings.lookup(destination);
}
// Linear-scan "tree": tries every registered path of matching length.
private class DestinationTree {
private Map<BindableDestination, List paths = new HashMap<>();
// Returns the bound destination for the given path segments, with
// variable values filled in, or null if nothing matches.
public BindableDestination lookup(String[] elements) {
for (Entry<BindableDestination, List entry : paths.entrySet()) {
List path = entry.getValue();
List values = new ArrayList<>();
if (elements.length == path.size()) {
for (int index = 0; index < elements.length; index++) {
DestinationNode node = path.get(index);
if (!node.matches(elements[index])) {
break;
}
if (node instanceof VariableNode) {
VariableNode variable = (VariableNode) node;
// Strip the static prefix before the variable's offset.
values.add(new StandardVariableValue(new StandardVariableDefinition(variable.getVariable()), elements[index].substring(variable.getOffset())));
}
// Only return once the final segment has matched.
if (index + 1 < elements.length) {
continue;
}
BindableDestination destination = entry.getKey();
return new BoundDestination(destination.getCode(), destination.getOperation(), destination.getPath(), values);
}
}
}
return null;
}
public void push(BindableDestination destination, List nodes) {
this.paths.put(destination, nodes);
}
}
} | java | 24 | 0.693039 | 157 | 34.072072 | 111 | starcoderdata |
package org.touchhome.bundle.arduino.setting;
import cc.arduino.contributions.ConsoleProgressListener;
import cc.arduino.contributions.ProgressListener;
import cc.arduino.contributions.libraries.ContributedLibrary;
import cc.arduino.contributions.libraries.ContributedLibraryReleases;
import cc.arduino.contributions.libraries.LibraryInstaller;
import lombok.SneakyThrows;
import org.json.JSONObject;
import org.touchhome.bundle.api.EntityContext;
import org.touchhome.bundle.api.console.ConsolePlugin;
import org.touchhome.bundle.api.model.ProgressBar;
import org.touchhome.bundle.api.setting.SettingPluginPackageInstall;
import org.touchhome.bundle.api.setting.console.ConsoleSettingPlugin;
import org.touchhome.bundle.arduino.ArduinoConfiguration;
import org.touchhome.bundle.arduino.ArduinoConsolePlugin;
import processing.app.BaseNoGui;
import processing.app.packages.UserLibrary;
import java.io.IOException;
import java.nio.file.Files;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
/**
 * Console setting plugin that manages Arduino libraries (list / install /
 * uninstall) through the Arduino IDE's library indexer and installer.
 *
 * NOTE(review): several generic type parameters (e.g. "Collection
 * bundleEntities", "List deps") and one string literal (see
 * buildBundleEntity) appear garbled by extraction — restore from the
 * original source before compiling.
 */
public class ConsoleArduinoLibraryManagerSetting implements SettingPluginPackageInstall, ConsoleSettingPlugin {
// Cache of the library index; reset to null to force a re-fetch.
private static Map<String, ContributedLibraryReleases> releases;
@Override
public String getIcon() {
return SettingPluginPackageInstall.super.getIcon();
}
@Override
public int order() {
return 90;
}
@Override
public boolean acceptConsolePluginPage(ConsolePlugin consolePlugin) {
return consolePlugin instanceof ArduinoConsolePlugin;
}
// Lists the libraries currently installed in the local Arduino setup.
@Override
public PackageContext installedPackages(EntityContext entityContext) {
Collection bundleEntities = new ArrayList<>();
if (BaseNoGui.packages != null) {
for (UserLibrary library : BaseNoGui.librariesIndexer.getInstalledLibraries()) {
bundleEntities.add(buildBundleEntity(library));
}
}
return new PackageContext(null, bundleEntities);
}
// Lists every library known to the index; forces an index refresh first.
@Override
public PackageContext allPackages(EntityContext entityContext) {
releases = null;
AtomicReference error = new AtomicReference<>();
Collection bundleEntities = new ArrayList<>();
if (BaseNoGui.packages != null) {
for (ContributedLibraryReleases release : getReleases(null, error).values()) {
ContributedLibrary latest = release.getLatest();
bundleEntities.add(buildBundleEntity(release.getReleases().stream().map(ContributedLibrary::getVersion).collect(Collectors.toList()), latest));
}
}
return new PackageContext(error.get(), bundleEntities);
}
// Installs the requested library version, installing missing dependencies
// first, then reloads the UI.
@Override
public void installPackage(EntityContext entityContext, PackageRequest packageRequest, ProgressBar progressBar) throws Exception {
if (BaseNoGui.packages != null) {
LibraryInstaller installer = ArduinoConfiguration.getLibraryInstaller();
ContributedLibrary lib = searchLibrary(getReleases(progressBar, null), packageRequest.getName(), packageRequest.getVersion());
List deps = BaseNoGui.librariesIndexer.getIndex().resolveDependeciesOf(lib);
boolean depsInstalled = deps.stream().allMatch(l -> l.getInstalledLibrary().isPresent() || l.getName().equals(lib != null ? lib.getName() : null));
ProgressListener progressListener = progress -> progressBar.progress(progress.getProgress(), progress.getStatus());
if (!depsInstalled) {
installer.install(deps, progressListener);
}
installer.install(lib, progressListener);
reBuildLibraries(entityContext, progressBar);
}
}
// Uninstalls a library unless it is IDE built-in; reports errors to the UI.
@Override
public void unInstallPackage(EntityContext entityContext, PackageRequest packageRequest, ProgressBar progressBar) throws IOException {
if (BaseNoGui.packages != null) {
ContributedLibrary lib = getReleases(progressBar, null).values().stream()
.filter(r -> r.getInstalled().isPresent() && r.getInstalled().get().getName().equals(packageRequest.getName()))
.map(r -> r.getInstalled().get()).findAny().orElse(null);
if (lib == null) {
entityContext.ui().sendErrorMessage("Library '" + packageRequest.getName() + "' not found");
} else if (lib.isIDEBuiltIn()) {
entityContext.ui().sendErrorMessage("Unable remove built-in library: '" + packageRequest.getName() + "'");
} else {
ProgressListener progressListener = progress -> progressBar.progress(progress.getProgress(), progress.getStatus());
LibraryInstaller installer = ArduinoConfiguration.getLibraryInstaller();
installer.remove(lib, progressListener);
reBuildLibraries(entityContext, progressBar);
}
}
}
// Finds the exact (name, version) release in the cached index, or null.
private ContributedLibrary searchLibrary(Map<String, ContributedLibraryReleases> releases, String name, String version) {
ContributedLibraryReleases release = releases.get(name);
if (release != null) {
return release.getReleases().stream().filter(r -> r.getVersion().equals(version)).findAny().orElse(null);
}
return null;
}
// Builds the UI model for an installed library, appending its README
// file contents when present.
@SneakyThrows
private PackageModel buildBundleEntity(UserLibrary library) {
PackageModel packageModel = new PackageModel()
.setName(library.getName())
.setTitle(library.getSentence())
.setVersion(library.getVersion())
.setWebsite(library.getWebsite())
.setAuthor(library.getAuthor())
.setCategory(library.getCategory())
.setReadme(library.getParagraph());
String[] readmeFiles = library.getInstalledFolder().list((dir, name) -> name.toLowerCase().startsWith("readme."));
if (readmeFiles != null && readmeFiles.length > 0) {
// NOTE(review): the string literal on the next line is truncated by
// extraction (unterminated " +) — restore the original separator.
packageModel.setReadme(packageModel.getReadme() + " +
new String(Files.readAllBytes(library.getInstalledFolder().toPath().resolve(readmeFiles[0]))));
}
if (library.isIDEBuiltIn()) {
packageModel.setTags(Collections.singleton("Built-In")).setRemovable(false);
}
return packageModel;
}
// Builds the UI model for an index (not necessarily installed) library.
private PackageModel buildBundleEntity(List versions, ContributedLibrary library) {
PackageModel packageModel = new PackageModel()
.setName(library.getName())
.setTitle(library.getSentence())
.setVersions(versions)
.setVersion(library.getVersion())
.setSize(library.getSize())
.setWebsite(library.getWebsite())
.setAuthor(library.getAuthor())
.setCategory(library.getCategory())
.setReadme(library.getParagraph());
if (library.isIDEBuiltIn()) {
packageModel.setTags(Collections.singleton("Built-In")).setRemovable(false);
}
return packageModel;
}
// Drops the cached index, re-fetches it, and reloads the web UI.
private void reBuildLibraries(EntityContext entityContext, ProgressBar progressBar) {
ConsoleArduinoLibraryManagerSetting.releases = null;
getReleases(progressBar, null);
entityContext.ui().reloadWindow("Re-Initialize page after library installation");
}
// Lazily (re)builds the library index. On update failure either rethrows
// (error == null) or records the message and falls back to the local index.
@SneakyThrows
private synchronized Map<String, ContributedLibraryReleases> getReleases(ProgressBar progressBar, AtomicReference error) {
if (releases == null) {
releases = new HashMap<>();
BaseNoGui.onBoardOrPortChange();
ProgressListener progressListener = progressBar != null ? progress -> progressBar.progress(progress.getProgress(), progress.getStatus()) :
new ConsoleProgressListener();
try {
LibraryInstaller libraryInstaller = ArduinoConfiguration.getLibraryInstaller();
libraryInstaller.updateIndex(progressListener);
} catch (Exception ex) {
if (error == null) {
throw ex;
}
error.set(ex.getMessage());
BaseNoGui.librariesIndexer.parseIndex();
BaseNoGui.librariesIndexer.rescanLibraries();
}
for (ContributedLibrary lib : BaseNoGui.librariesIndexer.getIndex().getLibraries()) {
if (releases.containsKey(lib.getName())) {
releases.get(lib.getName()).add(lib);
} else {
releases.put(lib.getName(), new ContributedLibraryReleases(lib));
}
}
}
return releases;
} | java | 21 | 0.661467 | 159 | 44.035897 | 195 | starcoderdata |
def CleanSearchString(self, myString):
''' Normalise a title search string (apostrophes, bad characters, length
cap, diacritics) and URL-encode it before querying the site. '''
log('AGNT :: Original Search Query : {0}'.format(myString))
myString = myString.lower().strip()
# for titles with " - " replace with ":"
myString = myString.replace(' - ', ': ')
# replace curly apostrophes with straight as strip diacritics will remove these
quoteChars = [ur'‘', ur'’', ur'\u2018', ur'\u2019']
pattern = u'({0})'.format('|'.join(quoteChars))
matched = re.search(pattern, myString) # match against whole string
if matched:
log('AGNT :: Search Query:: Replacing characters in string. Found one of these {0}'.format(quoteChars))
myString = re.sub(pattern, "'", myString)
myString = ' '.join(myString.split()) # remove continous white space
log('AGNT :: Amended Search Query [{0}]'.format(myString))
else:
log('AGNT :: Search Query:: String has none of these {0}'.format(quoteChars))
# Kink seems to fail to find Titles which have invalid chars in them, split at first incident and take first split, just to search but not compare
# the back tick is added to the list as users who can not include quotes in their filenames can use these to replace them without changing the scrappers code
badChars = ["'", '"', '`', ur'\u201c', ur'\u201d']
pattern = u'({0})'.format('|'.join(badChars))
myWords = myString.split()
# NOTE(review): removing from myWords while enumerating it skips the word
# after each removal — confirm whether consecutive bad words matter here.
for count, word in enumerate(myWords):
matched = re.search(pattern, word) # match against first word
if matched:
myWords.remove(myWords[count])
log('AGNT :: Search Query:: Dropping word {0} "{1}". Found one of these {2} characters'.format(count, word, badChars))
myString = ' '.join(myWords)
log('AGNT :: Amended Search Query [{0}]'.format(myString))
else:
log('AGNT :: Search Query:: Word {0} "{1}" has none of these chracters {2}'.format(count, word, badChars))
# string can not be longer than 50 characters
if len(myString) > 50:
lastSpace = myString[:50].rfind(' ')
myString = myString[:lastSpace]
myString = String.StripDiacritics(myString)
myString = String.URLEncode(myString.strip())
# sort out double encoding: & html code %26 for example is encoded as %2526; on MAC OS '*' sometimes appear in the encoded string
myString = myString.replace('%25', '%').replace('*', '')
log('AGNT :: Returned Search Query : {0}'.format(myString))
log(LOG_BIGLINE)
return myString | python | 14 | 0.5981 | 165 | 52.686275 | 51 | inline |
/**
 * Verifies that a compiled SpEL expression falls back with a
 * SpelEvaluationException (caused by ClassCastException) when re-evaluated
 * against an incompatible target/parameter type, and that reverting to
 * interpreted mode recovers.
 */
@Test
public void errorHandling() throws Exception {
TestClass5 tc = new TestClass5();
// changing target
// from primitive array to reference type array
int[] is = new int[] {1,2,3};
String[] strings = new String[] {"a","b","c"};
expression = parser.parseExpression("[1]");
assertThat(expression.getValue(is)).isEqualTo(2);
assertCanCompile(expression);
assertThat(expression.getValue(is)).isEqualTo(2);
// Compiled form was specialized for int[]; a String[] target must fail.
assertThatExceptionOfType(SpelEvaluationException.class).isThrownBy(() ->
expression.getValue(strings))
.withCauseInstanceOf(ClassCastException.class);
SpelCompiler.revertToInterpreted(expression);
assertThat(expression.getValue(strings)).isEqualTo("b");
assertCanCompile(expression);
assertThat(expression.getValue(strings)).isEqualTo("b");
tc.field = "foo";
expression = parser.parseExpression("seven(field)");
assertCantCompile(expression);
expression.getValue(tc);
assertThat(tc.s).isEqualTo("foo");
assertCanCompile(expression);
tc.reset();
tc.field="bar";
expression.getValue(tc);
// method with changing parameter types (change reference type)
tc.obj = "foo";
expression = parser.parseExpression("seven(obj)");
assertCantCompile(expression);
expression.getValue(tc);
assertThat(tc.s).isEqualTo("foo");
assertCanCompile(expression);
tc.reset();
tc.obj=new Integer(42);
assertThatExceptionOfType(SpelEvaluationException.class).isThrownBy(() ->
expression.getValue(tc))
.withCauseInstanceOf(ClassCastException.class);
// method with changing target
expression = parser.parseExpression("#root.charAt(0)");
assertThat(expression.getValue("abc")).isEqualTo('a');
assertCanCompile(expression);
assertThatExceptionOfType(SpelEvaluationException.class).isThrownBy(() ->
expression.getValue(new Integer(42)))
.withCauseInstanceOf(ClassCastException.class);
} | java | 12 | 0.740622 | 75 | 33.574074 | 54 | inline |
package com.hippo.ehviewer.ui;
import android.app.Activity;
import android.app.Dialog;
import android.content.Context;
import android.graphics.Color;
import android.view.Display;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.hippo.ehviewer.EhApplication;
import com.hippo.ehviewer.GScreenAdapter;
import com.hippo.ehviewer.R;
import com.hippo.yorozuya.StringUtils;
import java.util.Date;
/**
* Created by asdsad on 2017/12/20.
*/
/**
 * Reusable full-width dialog with a title bar, close icon, two bottom
 * buttons, and a content area whose widget depends on {@link DialogType}
 * (plain text, web view (disabled), or a text input).
 * Configure via the fluent setters, then call {@code bulider()} to inflate.
 */
public class CommonDialog extends Dialog{
public enum DialogType{TEXT ,WEBVIEW,INPUT}
public enum ButtonType{BTN_OKCANCEL,BTN_CANCEL}
LinearLayout mContentLayout;
TextView mTitleTx;
Button mLeftBtn,mRightBtn;
Context mContext;
DialogType mDialogType = DialogType.TEXT;
ButtonType mBtnType = ButtonType.BTN_OKCANCEL;
TextView mContentText;
EditText mContentEdit;
// HtmlView mContentWeb;
public CommonDialog(Context context ) {
super(context, R.style.DialogStyle);
mContext = context;
}
public CommonDialog(Context context , DialogType dialogType) {
super(context, R.style.DialogStyle);
mDialogType = dialogType;
mContext = context;
}
// Inflates the layout, wires up close/cancel handlers, builds the
// type-specific content view and sizes the dialog to near screen width.
// NOTE(review): method name "bulider" is a typo for "builder" — renaming
// would break existing callers, so it is left as-is.
public CommonDialog bulider() {
LayoutInflater li = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View contentView= li.inflate(R.layout.dlg_commom ,null);
mContentLayout = (LinearLayout)contentView.findViewById(R.id.content);
mTitleTx = (TextView) contentView.findViewById(R.id.title);
mLeftBtn = (Button) contentView.findViewById(R.id.btn_left);
mRightBtn = (Button) contentView.findViewById(R.id.btn_right);
ImageView closeBtn = (ImageView)contentView.findViewById(R.id.dialog_close);
closeBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dismiss();
}
});
// By default the right button acts as "cancel" (dismisses the dialog)
mRightBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dismiss();
}
});
buildContentView();
setContentView(contentView);
setCanceledOnTouchOutside(false);
Window dialogWindow = getWindow();
WindowManager m = ((Activity)mContext).getWindowManager();
Display d = m.getDefaultDisplay(); // used to read the screen width/height
WindowManager.LayoutParams p = dialogWindow.getAttributes(); // current dialog layout params
p.width = (int) (d.getWidth()- GScreenAdapter.instance().dip2px(10));
getWindow().setAttributes(p);
return this;
}
// Creates the content widget matching mDialogType (TEXT or INPUT;
// WEBVIEW support is commented out).
private void buildContentView() {
if(mDialogType == DialogType.TEXT){
mContentText = new TextView(getContext());
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT
);
lp.gravity = Gravity.CENTER;
lp.rightMargin =GScreenAdapter.instance().dip2px(10);
lp.leftMargin =GScreenAdapter.instance().dip2px(10);
mContentText.setTextSize(16);
mContentText.setTextColor(Color.BLACK);
mContentLayout.addView(mContentText , lp);
}
// if(mDialogType == DialogType.WEBVIEW){
// LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(
// LinearLayout.LayoutParams.MATCH_PARENT,
// GScreenAdapter.instance().dip2px(220)
// );
// mContentWeb = new HtmlView(mContext);
// mContentLayout.addView(mContentWeb , lp);
// }
if(mDialogType == DialogType.INPUT){
mContentEdit = new EditText(getContext());
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT
);
lp.setMargins(20,5,20,0);
mContentLayout.addView(mContentEdit , lp);
}
}
// Hides the left button for cancel-only dialogs.
// NOTE(review): compares against the mBtnType field, not the `type`
// parameter — confirm whether the parameter was meant to be assigned first.
public CommonDialog setButtonType(ButtonType type){
if(mBtnType == ButtonType.BTN_CANCEL&&mLeftBtn!=null){
mLeftBtn.setVisibility(View.INVISIBLE);
}
return this;
}
// public CommonDialog setUrl( String url){
// if(!StringUtils.equals(url,"")&&mContentWeb!=null){
// url = url + "?time="+String.valueOf (new Date().getTime());
// mContentWeb.load(url);
//
// }else{
// setContentViewShow(false);
// }
// return this;
// }
public CommonDialog setMessage(String msg){
if(!StringUtils.equals(msg,"")&&mContentText!=null){
mContentText.setText(msg);
}
return this;
}
public CommonDialog setTitle(String title){
if(!StringUtils.equals(title,"")&&mTitleTx!=null){
mTitleTx.setText(title);
}
return this;
}
public CommonDialog setLeftButtonText(String label){
if(!StringUtils.equals(label,"")&&mLeftBtn!=null)
mLeftBtn.setText(label);
return this;
}
public CommonDialog setRightButtonText(String label){
if(!StringUtils.equals(label,"")&&mRightBtn!=null)
mRightBtn.setText(label);
return this;
}
public CommonDialog setLeftButtonClickListener(View.OnClickListener listener){
if(listener!=null&&mLeftBtn!=null){
mLeftBtn.setOnClickListener(listener);
}
return this;
}
// Replaces the default dismiss behaviour of the right button.
public CommonDialog setRightButtonClickListener(View.OnClickListener listener){
if(listener!=null&&mRightBtn!=null){
mRightBtn.setOnClickListener(listener);
}
return this;
}
// NOTE(review): null-checks mContentLayout but dereferences mContentEdit —
// NPE risk for non-INPUT dialogs; confirm intended guard.
public String getInputText(){
if(mContentLayout!=null)
return mContentEdit.getText().toString();
return "";
}
public void setContentViewShow(boolean isShow){
mContentLayout.setVisibility(isShow ? View.VISIBLE :View.GONE);
}
// Hides the soft keyboard for the input field, then dismisses the dialog.
public void editClose(){
InputMethodManager imm = (InputMethodManager) EhApplication.getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(mContentEdit.getWindowToken(), 0);
dismiss();
}
} | java | 15 | 0.641493 | 128 | 31.211538 | 208 | starcoderdata |
<?php
namespace App\Pipeline;
use Closure;
class FirstPipe
{
/**
* Marks the payload as having passed through this pipe, then forwards
* it to the next stage of the pipeline.
*
* NOTE(review): the previous docblock ("uppercase the first character
* of a string") did not match this implementation.
*
* @param mixed $data array-like pipeline payload
* @param Closure $next next pipeline stage
* @return mixed result of the remaining pipeline
*/
public function handle($data, Closure $next)
{
$data['first_pipe'] = true;
return $next($data);
}
} | php | 10 | 0.59126 | 55 | 15.913043 | 23 | starcoderdata |
// Copyright © 2019
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"errors"
"os"
"github.com/spf13/cobra"
)
// unifyCmd represents the unify command: it runs the upload (forced) and
// clean commands concurrently against the same output path, failing on the
// first error either returns.
var unifyCmd = &cobra.Command{
Use:   "unify [output path]",
Short: "Unifies 2 directories where input is the source",
Long: `Uploads all files (with respect to .driveignores)
aswell as removes legacy files from the drive sync folder.
Its an alias for: 'driveignore upload [args] [flags] --force' + 'driveignore clean [args] [flags]'`,
RunE: func(cmd *cobra.Command, args []string) error {
// set flags
uploadForce = true
uploadMergeIgnores = unifyMergeIgnores
uploadInput = unifyInput
cleanInput = unifyInput
// call commands
// Buffered to cap(2) so neither goroutine blocks if we return early.
// NOTE(review): upload and clean operate concurrently on the same
// tree — confirm they cannot race on the same files.
errs := make(chan error, 2)
go func() {
errs <- uploadCmd.RunE(cmd, args)
}()
go func() {
errs <- cleanCmd.RunE(cmd, args)
}()
// First non-nil error wins; the second result is dropped.
for i := 0; i < cap(errs); i++ {
if err := <-errs; err != nil {
return err
}
}
return nil
},
// Validates that exactly one argument is given and that it names an
// existing directory.
Args: func(cmd *cobra.Command, args []string) error {
if len(args) != 1 {
return errors.New("There should only be one argument")
}
// NOTE(review): only os.IsNotExist is checked — any other Stat error
// leaves fstat nil and panics on fstat.IsDir(); consider `err != nil`.
fstat, err := os.Stat(args[0])
if os.IsNotExist(err) {
return errors.New("Passed path doesnt exist")
}
if !fstat.IsDir() {
return errors.New("Passed path isnt a directory")
}
return nil
},
}
var unifyInput string
var unifyMergeIgnores bool
func init() {
rootCmd.AddCommand(unifyCmd)
// local flags
unifyCmd.Flags().StringVarP(&unifyInput, "input", "i", ".", "Input directory of the files to be uploaded")
unifyCmd.Flags().BoolVarP(&unifyMergeIgnores, "merge-ignores", "M", false, "Merges global and input dir .driveignore")
} | go | 18 | 0.687189 | 119 | 26.271605 | 81 | starcoderdata |
/* $XFree86: xc/programs/Xserver/hw/xfree86/reconfig/os.h,v 3.6 1996/12/23 06:51:43 dawes Exp $ */
/* $XConsortium: os.h /main/5 1996/10/19 18:08:25 kaleb $ */
#include
#include
#include | c | 8 | 0.709265 | 98 | 23.076923 | 13 | starcoderdata |
/* Import node modules */
import React from 'react';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { library } from '@fortawesome/fontawesome-svg-core'
import { faChevronUp } from '@fortawesome/free-solid-svg-icons'
/* Import own modules */
import './PageScroller.css';
/**
 * Floating "scroll to top" anchor showing a chevron-up icon. Visibility is
 * driven by the `visible` state flag; the click handler is supplied by the
 * parent via `props.scrollTo`.
 */
export default class PageScroller extends React.Component {
constructor(props) {
super(props);
// Register the icon globally so <FontAwesomeIcon icon="chevron-up"/>
// can resolve it by name.
library.add(faChevronUp);
this.state = {
visible: true,
};
}
// Imperative show/hide toggle (presumably invoked by the parent through a
// ref — confirm at the call site).
visible(visible){
this.setState( { visible: visible } )
}
render() {
// NOTE(review): the closing </a> tag appears to have been lost when this
// snippet was extracted — restore it from the original source.
return (
<a id="pagescroller" href="#home" className={this.state.visible===true?'d-block':'d-none'} onClick={this.props.scrollTo}>
<FontAwesomeIcon icon="chevron-up" />
);
}
} | javascript | 16 | 0.599291 | 133 | 23.882353 | 34 | starcoderdata |
/* Hibernate, Relational Persistence for Idiomatic Java
*
* SPDX-License-Identifier: Apache-2.0
* Copyright: Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.reactive.provider.service;
/**
* A singleton {@link ReactiveMarkerService} that marks the registry as running in "Reactive mode" allowing
* the registration of reactive components.
*/
public final class ReactiveMarkerServiceSingleton implements ReactiveMarkerService {
// Eagerly created, stateless instance — safe to share.
public static final ReactiveMarkerServiceSingleton INSTANCE = new ReactiveMarkerServiceSingleton();
// Private constructor enforces the singleton: use INSTANCE.
private ReactiveMarkerServiceSingleton(){}
} | java | 7 | 0.798995 | 107 | 30.421053 | 19 | starcoderdata |
#pragma once
#include "7Vector.h"
#include "ser.h"
class CGraphData_old
{
public:
CGraphData_old();
CGraphData_old(int theDataType,int theDataObjectID,int theDataObjectAuxID);
virtual ~CGraphData_old();
// Various
int getDataType() const;
int getDataObjectID() const;
int getDataObjectAuxID() const;
void setDataObjectID(int newID);
void setDataObjectAuxID(int newID);
void setZoomFactor(float newZoomFactor);
float getZoomFactor() const;
void setAddCoeff(float newCoeff);
float getAddCoeff() const;
void setVisible(bool v);
bool getVisible() const;
void setIdentifier(int newIdentifier);
int getIdentifier() const;
void setLinkPoints(bool l);
bool getLinkPoints() const;
std::string getName() const;
void setName(std::string theName);
void setLabel(bool l);
bool getLabel() const;
void resetData(int bufferSize);
void setDerivativeIntegralAndCumulative(int val);
int getDerivativeIntegralAndCumulative() const;
void setValue(const C7Vector* graphCTM,int absIndex,bool firstValue,bool cyclic,float range,const std::vector times);
void setValueDirect(int absIndex,float theValue,bool firstValue,bool cyclic,float range,const std::vector times);
bool getValue(int absIndex,float& v) const;
bool getValueRaw(int absIndex,float& v) const;
int getDataLength();
void setUserData(float data);
void clearUserData();
void setMovingAverageCount(int c);
int getMovingAverageCount() const;
void serialize(CSer& ar,void* it);
CGraphData_old* copyYourself();
void performObjectLoadingMapping(const std::vector map);
void performCollisionLoadingMapping(const std::vector map);
void performDistanceLoadingMapping(const std::vector map);
void performIkLoadingMapping(const std::vector map);
bool announceObjectWillBeErased(int objID,bool copyBuffer);
bool announceCollisionWillBeErased(int collisionID,bool copyBuffer);
bool announceDistanceWillBeErased(int distanceID,bool copyBuffer);
bool announceIkObjectWillBeErased(int ikGroupID,bool copyBuffer);
// Variables which need to be serialized & copied
float ambientColor[3];
protected:
// Variables which need to be serialized & copied
std::vector _floatData;
std::vector _transformedFloatData;
std::vector <unsigned char> _floatDataValidFlags;
std::vector <unsigned char> _transformedFloatDataValidFlags;
int dataType;
int dataObjectID;
int dataObjectAuxID;
float zoomFactor;
float addCoeff;
int identifier;
bool visible;
bool linkPoints;
bool label;
int _derivativeIntegralAndCumulative;
std::string name;
private:
int _lifeID;
float _userData;
bool _userDataValid;
int _movingAverageCount;
}; | c | 9 | 0.725975 | 129 | 31.636364 | 88 | starcoderdata |
<div class="container">
<!-- Page Heading
<h1 class="h3 mb-2 text-gray-800">Konfirmasi Konsultasi
-->
<!-- DataTales Example -->
<div class="card shadow mb-4">
<div class="card-header py-3">
<h4 class="m-0 font-weight-bold text-primary">Konfirmasi Konsultasi
<div class="card-body">
<div class="table-responsive">
<table class="table table-bordered" id="laporan_konsultasi" width="100%">
<th width="1%">
<th width="12%">
<th width="7%">
<th width="10%">
KTP
<th width="5%">
Permohonan
<th width="5%">
Permohonan
<th width="5%">
Informasi
<th width="18%">
yang diminta
<th width="">
<!-- /.container-fluid -->
<script src="<?= base_url('assets/'); ?>vendor/jquery/jquery.min.js">
<script type="text/javascript" language="javascript">
$('#laporan_konsultasi').ready(function() {
var c = $('#laporan_konsultasi').DataTable();
load_data();
function load_data() {
$.ajax({
url: '<?php echo site_url('AdminControl/get_all_konsultasi') ?>',
dataType: "JSON",
success: function(data) {
c.clear().draw();
var HTMLbuilder = "";
for (var i = 0; i < data.length; i++) {
var btn1 = '<button type="button" name="btn_terima" id="' + data[i]['no_konsul'] + '" class="btn btn-xs btn-primary btn-circle btn_terima"><i class="fas fa-check">
var btn2 = '<button type="button" name="btn_delete" id="' + data[i]['no_konsul'] + '" class="btn btn-xs btn-danger btn-circle btn_tolak"><i class="fas fa-trash">
var imgHtml = "<img src='../assets/img/" + data[i]['ktp'] + "' width='150' height='100'>";
// HTMLbuilder = HTMLbuilder + imgHtml;
c.row.add([
" + [i + 1] + "
" + data[i]['nama_permohon'] + "
" + data[i]['no_telepon'] + "
" + imgHtml + "
" + data[i]['tanggal_permohonan'] + "
" + data[i]['waktu_permohonan'] + "
" + data[i]['jenis_informasi'] + "
" + data[i]['tujuan_informasi'] + "
" + btn1 + btn2 + "
]).draw();
}
}
});
}
$(document).on("click", ".btn_terima", function() {
var no_konsul = $(this).attr('id');
var status = 'terima';
var view = 1;
$.ajax({
url: "<?php echo site_url('AdminControl/konfirmasi_konsul'); ?>",
method: "POST",
data: {
no_konsul: no_konsul,
status: status,
view: view
},
success: function(data) {
load_unseen_notification();
load_data();
swal({
title: 'Konfirmasi Berhasil',
text: '',
type: 'success'
});
}
});
});
$(document).on("click", ".btn_tolak", function() {
var no_konsul = $(this).attr('id');
var status = 'ditolak';
var view = 0;
swal({
title: "Tolak Konsultasi",
text: "Apakah anda yakin akan Menolak Konsultasi ini?",
type: "warning",
showCancelButton: true,
confirmButtonText: "Hapus",
closeOnConfirm: true,
},
function() {
$.ajax({
url: "<?php echo site_url('AdminControl/konfirmasi_konsul'); ?>",
method: "POST",
data: {
no_konsul: no_konsul,
status: status,
view: view
},
success: function(data) {
load_data();
swal({
title: 'Berhasil Ditolak',
text: '',
type: 'success'
});
}
});
});
});
}); | php | 6 | 0.523021 | 184 | 26.373333 | 150 | starcoderdata |
import React from 'react';
import PropTypes from 'prop-types';
const ReadyPrompt = ({ show }) => (
<h1 className={`ready-prompt ${show ? 'show' : ''}`}>
<br />
Get ready!
);
ReadyPrompt.propTypes = {
show: PropTypes.bool.isRequired,
};
export default ReadyPrompt; | javascript | 10 | 0.685333 | 87 | 22.4375 | 16 | starcoderdata |
"use strict";
const referee = require("@sinonjs/referee");
// adapted from https://stackoverflow.com/a/40200710
// Returns true when `number` is a prime (2, 3, 5, 7, ...).
// Fixes over the original trial-division version:
//  - negative numbers and non-integers no longer report true
//    (the old loop body never ran for them, falling through to true);
//  - divisors are only tested up to sqrt(number), since any composite
//    has a factor in that range, turning O(n) into O(sqrt(n)).
function isPrime(number){
    if (!Number.isInteger(number) || number < 2) {
        return false;
    }
    for (var i = 2; i * i <= number; i++){
        if (number % i === 0){
            return false;
        }
    }
    return true;
}
// Registers a custom `isPrime` assertion with referee:
//   referee.assert.isPrime(7) / referee.refute.isPrime(8)
// and, via `expectation`, the BDD form expect(7).toBePrime().
referee.add("isPrime", {
    assert: function assert(actual) {
        // Reject bad arguments loudly instead of evaluating primality of garbage.
        if (typeof actual !== "number" || actual < 0) {
            throw new TypeError("'actual' argument should be a non-negative Number");
        }
        return isPrime(actual);
    },
    // ${actual} is interpolated by referee when the assertion fails.
    assertMessage: "Expected ${actual} to be a prime number",
    refuteMessage: "Expected ${actual} to not be a prime number",
    expectation: "toBePrime"
});
/**
*
*/
package grapheus.persistence.model.vocabulary;
import com.arangodb.entity.DocumentField;
import com.arangodb.entity.DocumentField.Type;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.Setter;
import lombok.ToString;
import grapheus.persistence.model.annotation.Entity;
import grapheus.persistence.model.annotation.Index;
/**
* @author black
*
*/
@Entity(name = "VOCABULARY")
@Getter
@Setter
@Builder
@AllArgsConstructor
@NoArgsConstructor
@ToString(of={"scope", "term"})
@Index(fields = {"scope", "term"})
@Index(fields = "scope", unique=false)
public class VocabularyTerm {
    // Persistent field names, kept in sync with the Java fields below
    // (used by callers when building queries against the VOCABULARY collection).
    public final static String FIELD_TERM = "term";
    public final static String FIELD_COUNT = "mentionsCount";
    public static final String FIELD_SCOPE = "scope";
    // Maps to the ArangoDB document key (Type.KEY).
    @DocumentField(Type.KEY)
    private String id;
    private String term;
    // Maps to the ArangoDB revision field (Type.REV).
    @DocumentField(Type.REV)
    private String rev;
    // NOTE(review): field order matters here — Lombok's @Builder and
    // @AllArgsConstructor derive their parameter order from it.
    @NonNull
    private String scope;
    private int mentionsCount;
}
package com.txusballesteros.bubbles.app;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
/**
* Created by gopikrishna on 6/13/16.
*/
public class CustomPhoneStateListener extends PhoneStateListener {
private static final String TAG = "CustomPhoneStateListene";
@Override
public void onCallStateChanged(int state, String incomingNumber){
// if(incomingNumber!=null&&incomingNumber.length()>0) incoming_nr=incomingNumber;
switch(state){
case TelephonyManager.CALL_STATE_RINGING:
Log.d(TAG, "CALL_STATE_RINGING");
break;
case TelephonyManager.CALL_STATE_OFFHOOK:
Log.d(TAG, "CALL_STATE_OFFHOOK");
break;
case TelephonyManager.CALL_STATE_IDLE:
Log.d(TAG, "CALL_STATE_IDLE==>");
break;
}
}
} | java | 12 | 0.639485 | 89 | 28.125 | 32 | starcoderdata |
package com.friends.repo;
import com.friends.domain.ProfileAccount;
import org.springframework.data.repository.CrudRepository;
/**
* Created by on 3/18/2017.
*/
/** Spring Data CRUD repository for {@link ProfileAccount} entities keyed by a String id. */
public interface ProfileAccountRepository extends CrudRepository<ProfileAccount, String>{
}
using Leelite.Framework.Service;
using Leelite.Modules.Identity.Dtos.UserDtos;
using Leelite.Modules.Identity.Models.UserAgg;
namespace Leelite.Modules.Identity.Interfaces
{
    /// <summary>
    /// CRUD application service for <c>User</c> aggregates with <c>long</c> keys,
    /// exposed through the module's Dto / create / update / query-parameter types.
    /// All operations are inherited from <c>ICrudService</c>.
    /// </summary>
    public interface IUserService : ICrudService<User, long, UserDto, UserCreateRequest, UserUpdateRequest, UserQueryParameter>
    {
    }
} | c# | 7 | 0.82093 | 127 | 38.090909 | 11 | starcoderdata |
package dbsettingsrepository
import "github.com/wavelaunch/pikachu-sql/api/internal/domain"
// jsonConfigFileLoader is meant to load database connection settings from a
// JSON configuration file located at configFilePath.
type jsonConfigFileLoader struct {
	configFilePath string // path of the JSON settings file
}
// NewJsonConfigFileLoader returns a loader bound to the given config file path.
func NewJsonConfigFileLoader(configFilePath string) *jsonConfigFileLoader {
	return &jsonConfigFileLoader{
		configFilePath: configFilePath,
	}
}
// Find looks up the connection identified by id.
// NOTE(review): stub — id is ignored and a zero DatabaseConnection with a nil
// error is always returned; the JSON file is never read.
func (l *jsonConfigFileLoader) Find(id string) (domain.DatabaseConnection, error) {
	return domain.DatabaseConnection{}, nil
}
// FindAll lists every configured connection.
// NOTE(review): stub — always returns an empty slice and a nil error.
func (l *jsonConfigFileLoader) FindAll() ([]domain.DatabaseConnection, error) {
	return []domain.DatabaseConnection{}, nil
}
package ink.anyway.component.common.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Xml工具类
* @author 李海博
* @version v1.0
*
*/
public class JaxbXmlUtil {
private static final Logger logger = LoggerFactory.getLogger(JaxbXmlUtil.class);
public static String xmlTop = "";
static {
StringBuilder sb = new StringBuilder();
sb.append("<?xml version=").append('"').append("1.0").append('"')
.append(" encoding=").append('"').append("GB18030").append('"')
.append(" standalone=").append('"').append("yes").append('"')
.append("?>");
xmlTop = sb.toString();
}
public static String objectToXml(Object object) {
String resV = "";
try {
ByteArrayOutputStream os = new ByteArrayOutputStream();
JAXBContext context = JAXBContext.newInstance(object.getClass());
Marshaller marshaller = context.createMarshaller();
marshaller.setProperty(Marshaller.JAXB_ENCODING, "GB18030");
marshaller.marshal(object, os);
resV = new String(os.toByteArray(), "GB18030");
} catch (Exception e) {
e.printStackTrace();
}
return resV;
}
public static Object xmlToObject(Class classOb, String xml) {
Object resV = null;
try {
ByteArrayInputStream is = new ByteArrayInputStream(
xml.getBytes("GB18030"));
JAXBContext context = JAXBContext.newInstance(classOb);
Unmarshaller unmarshaller = context.createUnmarshaller();
resV = unmarshaller.unmarshal(is);
} catch (Exception e) {
e.printStackTrace();
}
return resV;
}
public static boolean objectToXmlFile(Object object, String filePath) {
boolean bResult = false;
// 文件目录
File file = new File(filePath);
// 文件否
if (file.isDirectory()) {
// 目录自动创建,并文件名自动获取,文件名使用类名
if (!file.exists()) {
if (!file.mkdir()) {
logger.warn("[" + JaxbXmlUtil.class.getName()
+ "]目录创建失败!!!");
return false;
}
}
if (!filePath.endsWith("/") || !filePath.endsWith("\\")) {
filePath = filePath + System.getProperty("file.separator");
}
filePath = filePath + object.getClass().getSimpleName() + ".xml";
} else {
int fileSeparatorIndex = filePath.lastIndexOf(System
.getProperty("file.separator"));
String dirStr = filePath.substring(0, fileSeparatorIndex);
File testFile = new File(dirStr);
if (!testFile.exists()) {
if (!testFile.mkdir()) {
logger.warn("[" + JaxbXmlUtil.class.getName()
+ "]目录创建失败!!!");
return false;
}
}
}
File xmlFile = new File(filePath);
try {
JAXBContext context = JAXBContext.newInstance(object.getClass());
Marshaller marshaller = context.createMarshaller();
marshaller.marshal(object, new FileOutputStream(xmlFile));
bResult = true;
} catch (JAXBException e) {
logger.warn("JAXB失败!!!");
e.printStackTrace();
} catch (FileNotFoundException e) {
logger.warn("文件未找到!!!");
e.printStackTrace();
}
return bResult;
}
public static Object xmlFileToObject(Class class1, String filePath) {
Object rObject = null;
File xmlFile = new File(filePath);
if (xmlFile.isDirectory()) {
if (!filePath.endsWith("/") || !filePath.endsWith("\\")) {
filePath = filePath + System.getProperty("file.separator");
}
filePath = filePath + class1.getSimpleName() + ".xml";
xmlFile = new File(filePath);
if (!xmlFile.exists()) {
logger.warn("文件不存在!!!");
return null;
}
} else {
if (!xmlFile.exists()) {
logger.warn("文件不存在!!!");
return null;
}
}
try {
JAXBContext context = JAXBContext.newInstance(class1);
Unmarshaller unmarshaller = context.createUnmarshaller();
rObject = unmarshaller.unmarshal(xmlFile);
} catch (JAXBException e) {
logger.warn("JAXB失败!!!");
e.printStackTrace();
}
return rObject;
}
} | java | 21 | 0.672022 | 81 | 26.903448 | 145 | starcoderdata |
    /// Construct with the current value of the shared request-ID counter and
    /// advance the counter for the next request. The counter wraps from
    /// M_MAX_UNSIGNED back to 1, so neither 0 nor M_MAX_UNSIGNED is issued
    /// after a wrap-around.
    explicit AsyncExecRequest(unsigned& requestID) :
        requestID_(requestID)
    {
        // Increment ID for next request
        ++requestID;
        if (requestID == M_MAX_UNSIGNED)
            requestID = 1;
    }
from __future__ import print_function
import argparse
import disasmlib
import subprocess
import sys
if sys.version_info < (3,):
from StringIO import StringIO
else:
from io import StringIO
def main():
    """Emit a function call graph for an ELF binary as dot, png, or svg.

    Fixes over the original:
      * with ``--target dot`` the code previously ran graphviz anyway and
        crashed calling ``f.getvalue()`` on a plain file object; now the
        .dot file is written directly and graphviz only runs for png/svg;
      * the output file is closed via a context manager;
      * ``--hide-alone`` (parsed but previously unused) now suppresses
        nodes that have no call or jump edges.
    """
    argparser = argparse.ArgumentParser()
    argparser.add_argument('--toolchain', default=None)
    argparser.add_argument('--target', '-T', choices=('dot', 'png', 'svg'), default='dot')
    argparser.add_argument('--hide-alone', default=False, action='store_true')
    argparser.add_argument('elf')
    args = argparser.parse_args()
    elfpath = args.elf
    elf = disasmlib.ElfFile(elfpath)
    elf.set_toolchain(args.toolchain)
    elf.read()
    def print_cfg(f):
        """Write the call graph in graphviz dot syntax to file object *f*."""
        print('digraph test {', file=f)
        for func in elf.disasm.funcs:
            # Collect cross-function jump targets (tail calls / shared blocks),
            # deduplicated via `past`; calls are taken from func.calleefuncs.
            jumpfuncs = list()
            past = list()
            for block in func.blocks:
                for postblock in block.postblocks:
                    if postblock.func not in past and postblock.func != func:
                        jumpfuncs.append(postblock.func)
                    if postblock.func not in past:
                        past.append(postblock.func)
            if len(jumpfuncs) + len(func.calleefuncs) == 0:
                # Isolated function: emit a bare node unless suppressed.
                if not args.hide_alone:
                    print('"%s";' % (func.name), file=f)
                continue
            for callee in func.calleefuncs:
                print('"%s" -> "%s";' % (func.name, callee.name), file=f)
            for jump in jumpfuncs:
                # Dashed edges distinguish jumps from proper calls.
                print('"%s" -> "%s" [style="dashed"];' % (func.name, jump.name), file=f)
        print('}', file=f)
    if args.target == 'dot':
        # Plain dot output: write the file directly, no rendering step.
        with open(elfpath + '.call.dot', 'w') as f:
            print_cfg(f)
    else:
        # png/svg: build the dot text in memory and pipe it through graphviz.
        buf = StringIO()
        print_cfg(buf)
        p = subprocess.Popen(
            ['dot', '-T', args.target, '-o', elfpath + '.call.dot.' + args.target],
            stdin=subprocess.PIPE)
        p.communicate(input=buf.getvalue().encode())
if __name__ == '__main__':
main() | python | 17 | 0.552262 | 90 | 32.736842 | 57 | starcoderdata |
//
// GangMJExtension.h
// GangMJExtension
//
// Created by mj on 14-1-15.
// Copyright (c) 2014年 小码哥. All rights reserved.
// 代码地址:https://github.com/CoderMJLee/GangMJExtension
// 代码地址:http://code4app.com/ios/%E5%AD%97%E5%85%B8-JSON-%E4%B8%8E%E6%A8%A1%E5%9E%8B%E7%9A%84%E8%BD%AC%E6%8D%A2/5339992a933bf062608b4c57
#import "NSObject+GangMJCoding.h"
#import "NSObject+GangMJProperty.h"
#import "NSObject+GangMJClass.h"
#import "NSObject+GangMJKeyValue.h"
#import "NSString+GangMJExtension.h"
#import "GangMJExtensionConst.h" | c | 7 | 0.757119 | 136 | 36.3125 | 16 | starcoderdata |
#region Licence
/**
* Copyright © 2014-2018 OTTools
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#endregion
#region Using Statements
using ItemEditor;
using System;
using System.Reflection;
#endregion
namespace OTLib.Server.Items
{
    /// <summary>
    /// Broad gameplay category of a server item. Values are persisted as
    /// bytes, so existing numbering must not change.
    /// </summary>
    public enum ServerItemGroup : byte
    {
        None = 0,
        Ground = 1,
        Container = 2,
        Weapon = 3,
        Ammunition = 4,
        Armor = 5,
        Changes = 6,
        Teleport = 7,
        MagicField = 8,
        Writable = 9,
        Key = 10,
        Splash = 11,
        Fluid = 12,
        Door = 13,
        Deprecated = 14
    }
    /// <summary>
    /// Reduced item type used server-side; a coarser classification than
    /// <see cref="ServerItemGroup"/>. Persisted as a byte.
    /// </summary>
    public enum ServerItemType : byte
    {
        None = 0,
        Ground = 1,
        Container = 2,
        Fluid = 3,
        Splash = 4,
        Deprecated = 5
    }
    /// <summary>
    /// Stacking layer of an item on a map tile (border below bottom below top).
    /// Persisted as a byte.
    /// </summary>
    public enum TileStackOrder : byte
    {
        None = 0,
        Border = 1,
        Bottom = 2,
        Top = 3
    }
public class ServerItem : Item
{
#region Contructors
public ServerItem()
{
////
}
public ServerItem(Item item)
{
if (item == null)
{
throw new ArgumentNullException("item");
}
this.CopyPropertiesFrom(item);
}
#endregion
#region Public Properties
public ushort ClientId { get; set; }
public ushort PreviousClientId { get; set; }
///
/// Used during an update to indicate if this item has been updated.
///
public bool SpriteAssigned { get; set; }
///
/// An custom created item id.
///
public bool IsCustomCreated { get; set; }
#endregion
#region Public Methods
override public string ToString()
{
if (!string.IsNullOrEmpty(Name))
{
return this.ID.ToString() + " - " + this.Name;
}
return this.ID.ToString();
}
#endregion
}
} | c# | 16 | 0.542474 | 76 | 22.840336 | 119 | starcoderdata |
# Read 25 integers into each of two lists, then build a third list that
# alternates their elements pairwise (A[0], B[0], A[1], B[1], ...).
listA = [int(input()) for _ in range(25)]
listB = [int(input()) for _ in range(25)]
listC = []
for a, b in zip(listA, listB):
    listC.append(a)
    listC.append(b)
# One list per line, in the order they were built.
print(f'{listA}\n{listB}\n{listC}')
'use strict'
const { Bearer } = require('permit')
const KnownError = require('../utils/KnownError')
const ttl = require('../utils/ttl')
const tokens = require('../database/tokens')
const permit = new Bearer({ query: 'token' })
/**
 * Validates the bearer token of an incoming request (Authorization header
 * or `token` query parameter, via permit).
 *
 * Note: failures are returned as KnownError *values*, not thrown — callers
 * must check the result.
 *
 * @param {Object} req Incoming HTTP request.
 * @returns {Promise<true|KnownError>} `true` when the token is known and
 *   still within its TTL; a KnownError otherwise.
 */
module.exports = async (req) => {
	const token = permit.check(req)
	// Token not in request
	if (token == null) {
		return new KnownError('Token missing')
	}
	const entry = await tokens.get(token)
	// Token not in database
	if (entry == null) {
		return new KnownError('Token invalid')
	}
	// TTL is measured from the entry's last update, bounded by ACKEE_TTL.
	const valid = ttl(entry.updated, process.env.ACKEE_TTL)
	// Token too old
	if (valid === false) {
		return new KnownError('Token invalid')
	}
	// Refresh the timestamp so an active session stays valid.
	await tokens.update(token)
	return true
}
using FluentNHibernate.Conventions.Instances;
using FluentNHibernate.Conventions.Inspections;
namespace FluentNHibernate.Conventions
{
    /// <summary>
    /// Marker for conventions applied to every mapped collection: the mapping is
    /// read through <c>ICollectionInspector</c> and altered via <c>ICollectionInstance</c>.
    /// </summary>
    public interface ICollectionConvention : IConvention<ICollectionInspector, ICollectionInstance>
    {}
} | c# | 7 | 0.836991 | 99 | 33.777778 | 9 | starcoderdata |
		[Test]
		public static void TestChattelReader_GetAssetAsync2_LocalCacheIsWritten() {
			// Simply need to verify that CacheRule.Normal is in effect:
			// after an async read that hits the asset server, the asset must be
			// written through to local storage exactly once.
			var server = Substitute.For<IAssetServer>();
			var config = new ChattelConfiguration(LOCAL_STORAGE_DIR_INFO.FullName, server);
			var localStorage = Substitute.For<IChattelLocalStorage>();
			var asset = new StratusAsset {
				Id = Guid.NewGuid(),
			};
			// Server mock returns the asset for its id.
			server.RequestAssetSync(asset.Id).Returns(asset);
			var reader = new ChattelReader(config, localStorage);
			// Callback result is irrelevant here; only the storage side effect matters.
			reader.GetAssetAsync(asset.Id, resultAsset => {});
			localStorage.Received(1).StoreAsset(asset);
		}
using Microsoft.Research.SpeechWriter.Core.Items;
using NUnit.Framework;
using System.Collections.Generic;
using System.Linq;
namespace Microsoft.Research.SpeechWriter.Core.Test
{
[Parallelizable(ParallelScope.All)]
public class SuggestionTest
{
private class SeededEnvironment : DefaultWriterEnvironment, IWriterEnvironment
{
private readonly string[] _seeds;
internal SeededEnvironment(params string[] seeds)
{
_seeds = seeds;
}
///
/// Dictionary of words, listed from most likely to least likely.
///
/// of words.
public IEnumerable GetOrderedSeedWords()
{
return _seeds;
}
}
private static ApplicationModel CreateModel(params string[] seeds)
{
var environment = new SeededEnvironment(seeds);
var model = new ApplicationModel(environment);
return model;
}
[Test]
public void EmptyWords()
{
var model = CreateModel();
Assert.AreEqual(2, model.SuggestionLists.Count);
Assert.AreEqual(3, model.SuggestionInterstitials.Count);
Assert.IsInstanceOf
Assert.IsInstanceOf
Assert.IsInstanceOf
}
[Test]
public void LonelyX()
{
var model = CreateModel("X");
Assert.AreEqual(3, model.SuggestionLists.Count);
Assert.AreEqual(1, model.SuggestionLists[2].Count());
var tile = model.SuggestionLists[2].First();
Assert.IsInstanceOf
var suggestedWordTile = (SuggestedWordItem)tile;
Assert.AreEqual("X", suggestedWordTile.FormattedContent);
Assert.AreEqual(4, model.SuggestionInterstitials.Count);
Assert.IsInstanceOf
Assert.IsInstanceOf
Assert.IsInstanceOf
Assert.IsInstanceOf
}
[Test]
public void TwoXs()
{
var model = CreateModel("X", "x");
Assert.AreEqual(3, model.SuggestionLists.Count);
Assert.AreEqual(1, model.SuggestionLists[2].Count());
var tile = model.SuggestionLists[2].First();
Assert.IsInstanceOf
var suggestedWordTile = (SuggestedWordItem)tile;
Assert.AreEqual("X", suggestedWordTile.FormattedContent);
Assert.AreEqual(4, model.SuggestionInterstitials.Count);
Assert.IsInstanceOf
Assert.IsInstanceOf
Assert.IsInstanceOf
Assert.IsInstanceOf
}
}
} | c# | 16 | 0.644942 | 101 | 38.651685 | 89 | starcoderdata |
/*
* Copyright 2014-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghost.framework.data.jdbc.jpa.plugin.repository.cdi;
//import ghost.framework.data.repository.core.NamedQueries;
//import ghost.framework.data.repository.core.support.QueryCreationListener;
//import ghost.framework.data.repository.core.support.RepositoryProxyPostProcessor;
//import ghost.framework.data.repository.query.QueryLookupStrategy;
//import ghost.framework.data.repository.query.QueryMethodEvaluationContextProvider;
import ghost.framework.data.commons.repository.QueryMethodEvaluationContextProvider;
import ghost.framework.data.commons.repository.core.QueryCreationListener;
import ghost.framework.data.commons.repository.core.RepositoryProxyPostProcessor;
import ghost.framework.data.commons.repository.query.QueryLookupStrategy;
import javax.persistence.NamedQueries;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
/**
* Interface containing the configurable options for the Spring Data repository subsystem using CDI.
*
* @author
* @author
* @author
*/
public interface CdiRepositoryConfiguration {
/**
* Return the {@link QueryMethodEvaluationContextProvider} to use. Can be {@link Optional#empty()} .
*
* @return the optional {@link QueryMethodEvaluationContextProvider} base to use, can be {@link Optional#empty()},
* must not be {@literal null}.
* @since 2.1
*/
default Optional getEvaluationContextProvider() {
return Optional.empty();
}
/**
* Return the {@link NamedQueries} to use. Can be {@link Optional#empty()}.
*
* @return the optional named queries to use, can be {@link Optional#empty()}, must not be {@literal null}.
* @since 2.1
*/
default Optional getNamedQueries() {
return Optional.empty();
}
/**
* Return the {@link QueryLookupStrategy.Key} to lookup queries. Can be {@link Optional#empty()}.
*
* @return the lookup strategy to use, can be {@link Optional#empty()}, must not be {@literal null}.
* @since 2.1
*/
default Optional getQueryLookupStrategy() {
return Optional.empty();
}
/**
* Return the {@link Class repository base class} to use. Can be {@link Optional#empty()} .
*
* @return the optional repository base to use, can be {@link Optional#empty()}, must not be {@literal null}.
* @since 2.1
*/
default Optional getRepositoryBeanClass() {
return Optional.empty();
}
/**
* Returns the configured postfix to be used for looking up custom implementation classes.
*
* @return the postfix to use, must not be {@literal null}.
*/
default String getRepositoryImplementationPostfix() {
return "Impl";
}
/**
* Returns the list of {@link RepositoryProxyPostProcessor} to be used during repository proxy creation. Can be
* {@link Collections#emptyList()} .
*
* @return the list of repository proxy post processors to use, can be {@link Collections#emptyList()}, must not be
* {@literal null}.
* @since 2.2
*/
default List getRepositoryProxyPostProcessors() {
return Collections.emptyList();
}
/**
* Returns the list of {@link QueryCreationListener} to be used during repository proxy creation. Can be
* {@link Collections#emptyList()} .
*
* @return the list query creation listeners to use, can be {@link Collections#emptyList()}, must not be
* {@literal null}.
* @since 2.2
*/
default List getQueryCreationListeners() {
return Collections.emptyList();
}
} | java | 9 | 0.734772 | 116 | 35.269565 | 115 | starcoderdata |
package io.metadew.iesi.metadata.configuration.component;
import io.metadew.iesi.connection.tools.SQLTools;
import io.metadew.iesi.metadata.definition.component.ComponentVersion;
import io.metadew.iesi.metadata.execution.MetadataControl;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import javax.sql.rowset.CachedRowSet;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.MessageFormat;
import java.util.Optional;
public class ComponentVersionConfiguration {
private final static Logger LOGGER = LogManager.getLogger();
public ComponentVersionConfiguration() {}
public Optional getComponentVersion(String componentId, long componentVersionNumber) {
String queryComponentVersion = "select COMP_ID, COMP_VRS_NB, COMP_VRS_DSC from " + MetadataControl.getInstance().getDesignMetadataRepository().getTableNameByLabel("ComponentVersions")
+ " where COMP_ID = " + SQLTools.GetStringForSQL(componentId) + " and COMP_VRS_NB = " + SQLTools.GetStringForSQL(componentVersionNumber);
CachedRowSet crsComponentVersion = MetadataControl.getInstance().getDesignMetadataRepository().executeQuery(queryComponentVersion, "reader");
try {
if (crsComponentVersion.size() == 0) {
return Optional.empty();
} else if (crsComponentVersion.size() > 1) {
LOGGER.warn(MessageFormat.format("component.version=found multiple descriptions for component id {0} version {1}. " + "Returning first implementation.", componentId, componentVersionNumber));
}
crsComponentVersion.next();
ComponentVersion componentVersion = new ComponentVersion(componentVersionNumber, crsComponentVersion.getString("COMP_VRS_DSC"));
crsComponentVersion.close();
return Optional.of(componentVersion);
} catch (Exception e) {
StringWriter stackTrace = new StringWriter();
e.printStackTrace(new PrintWriter(stackTrace));
LOGGER.warn("exeption=" + e.getMessage());
LOGGER.info("exception.stacktrace=" + stackTrace.toString());
}
return Optional.empty();
}
} | java | 17 | 0.71868 | 207 | 47.782609 | 46 | starcoderdata |
import VueLodash from 'vue-lodash';
import lodash from 'lodash';
// Vue plugin wiring: exposes lodash on components as `this.$lodash`.
const lodashPlugin = {
  install(Vue) {
    Vue.use(VueLodash, { name: '$lodash', lodash });
  },
};

export default lodashPlugin;
from cloupy.diagrams.walter_lieth import WalterLieth
import pytest
import pandas as pd
import matplotlib.pyplot as plt
class TestDrawing:
    """Smoke tests for WalterLieth.draw(): each call must create a new
    matplotlib figure for humid, dry, and mixed precipitation regimes."""
    @pytest.fixture
    def data_for_humid_period(self):
        """Monthly climate normals (temperature, precipitation, extremes)
        for a station where precipitation exceeds evaporation year-round."""
        return pd.DataFrame(
            {
                'months': {
                    0: 1, 1: 2, 2: 3, 3: 4, 4: 5, 5: 6, 6: 7, 7: 8, 8: 9, 9: 10,
                    10: 11, 11: 12
                },
                'temp': {
                    0: -1.36, 1: -0.59, 2: 3.03, 3: 8.35, 4: 13.55, 5: 17.02, 6: 18.64,
                    7: 18.07, 8: 13.76, 9: 8.82, 10: 3.87, 11: 0.36
                },
                'preci': {
                    0: 32.70, 1: 26.76, 2: 31.40, 3: 33.63, 4: 51.10, 5: 61.09,
                    6: 79.37, 7: 57.99, 8: 42.59, 9: 37.12, 10: 37.18, 11: 38.74
                },
                'temp_max': {
                    0: 5.45, 1: 6.79, 2: 13.54, 3: 20.07, 4: 24.67, 5: 28.25, 6: 29.65,
                    7: 29.21, 8: 25.24, 9: 18.88, 10: 11.63, 11: 7.66
                },
                'temp_min': {
                    0: -11.22, 1: -10.60, 2: -6.38, 3: -1.66, 4: 2.95, 5: 6.77, 6: 9.38,
                    7: 8.72, 8: 4.23, 9: 0.31, 10: -3.72, 11: -8.56
                }
            }
        )
    @pytest.fixture
    def default_settings(self):
        """Default draw() keyword arguments (all decorations enabled)."""
        return {
            'figsize': (7.74, 7.74), 'language': None, 'freeze_rectangles': True,
            'title_text': True, 'years_text': True, 'coordinates_box': True,
            'yearly_means_box': True, 'extremes_box': True, 'legend_box': True
        }
    @pytest.fixture
    def opposite_settings(self):
        """Inverse of default_settings (all toggles off, Polish language)."""
        return {
            'figsize': (7.74, 7.74), 'language': 'POL', 'freeze_rectangles': False,
            'title_text': False, 'years_text': False, 'coordinates_box': False,
            'yearly_means_box': False, 'extremes_box': False, 'legend_box': False
        }
    def test_humid_period_and_elements_viewing(
            self, default_settings, opposite_settings,
            data_for_humid_period
    ):
        """Drawing with every element disabled must still open a new figure."""
        # Overwrite the defaults with the opposite values before drawing.
        for setting, value in opposite_settings.items():
            default_settings[setting] = value
        plotted_figures_before = plt.gcf().number
        wl = WalterLieth(
            'POZNAŃ', dataframe=data_for_humid_period, years_range=range(1951, 2020),
            lat=52.42, lon=16.83, elevation=92
        )
        wl.draw(
            figsize=default_settings['figsize'], language=default_settings['language'],
            freeze_rectangles=default_settings['freeze_rectangles'], title_text=default_settings['title_text'],
            years_text=default_settings['years_text'], coordinates_box=default_settings['coordinates_box'],
            yearly_means_box=default_settings['yearly_means_box'], extremes_box=default_settings['extremes_box'],
            legend_box=default_settings['legend_box']
        )
        # Figure numbering increases when draw() created a fresh figure.
        plotted_figures_after = plt.gcf().number
        assert plotted_figures_before < plotted_figures_after
    def test_dry_period(
            self, data_for_humid_period
    ):
        """Zero precipitation in every month must still render a diagram."""
        data_for_dry_period = data_for_humid_period
        data_for_dry_period.preci = [0] * 12
        plotted_figures_before = plt.gcf().number
        wl = WalterLieth(
            'POZNAŃ', dataframe=data_for_dry_period, years_range=range(1951, 2020),
            lat=52.42, lon=16.83, elevation=92
        )
        wl.draw()
        plotted_figures_after = plt.gcf().number
        assert plotted_figures_before < plotted_figures_after
    def test_mixed_all_periods(
            self, data_for_humid_period
    ):
        """Humid, dry and transitional months in one series must render."""
        data_for_mixed_periods = data_for_humid_period
        data_for_mixed_periods['preci'] = [110, 70, 50, 40, 30, 20, 0, 0, 20, 30, 70, 110]
        plotted_figures_before = plt.gcf().number
        wl = WalterLieth(
            'POZNAŃ', dataframe=data_for_mixed_periods, years_range=range(1951, 2020),
            lat=52.42, lon=16.83, elevation=92
        )
        wl.draw()
        plotted_figures_after = plt.gcf().number
        assert plotted_figures_before < plotted_figures_after
package sortedmap
import (
"errors"
"time"
)
// IterChCloser allows records to be read through a channel that is returned by the Records method.
// IterChCloser values should be closed after use using the Close method.
type IterChCloser struct {
	ch chan Record // buffered stream of records being iterated
	canceled chan struct{} // closed by Close to stop the sending goroutine
}
// Close cancels a channel-based iteration and causes the sending goroutine to exit.
// Close should be used after an IterChCloser is finished being read from.
func (iterCh *IterChCloser) Close() error {
	// Closing `canceled` unblocks any pending send in the iteration
	// goroutine, letting it exit. This never fails.
	close(iterCh.canceled)
	return nil
}
// Records returns a channel that records can be read from.
func (iterCh *IterChCloser) Records() <-chan Record {
	// Receive-only view; the channel is closed when iteration finishes.
	return iterCh.ch
}
// IterChParams contains configurable settings for CustomIterCh.
// SendTimeout is disabled by default, though it should be set to allow
// channel send goroutines to time-out.
// BufSize is set to 1 if its field is set to a lower value.
// LowerBound and UpperBound default to regular iteration when left unset.
type IterChParams struct {
	// Reversed iterates from the upper bound down to the lower bound.
	Reversed bool
	// SendTimeout bounds how long one channel send may block (<= 0: forever).
	SendTimeout time.Duration
	// BufSize is the record channel's buffer size (clamped to >= 1).
	BufSize int
	// LowerBound and UpperBound restrict iteration to a key range.
	LowerBound,
	UpperBound interface{}
}

// IterCallbackFunc defines the type of function that is passed into an IterFunc method.
// The function is passed a record value argument.
// Returning false stops the iteration early.
type IterCallbackFunc func(rec Record) bool
// setBufSize clamps a requested channel buffer size to the minimum of 1.
// A buffer below 1 could leave a blocked channel send goroutine stuck
// forever (see https://github.com/golang/go/wiki/Timeouts).
func setBufSize(bufSize int) int {
	const minBufSize = 1
	if bufSize >= minBufSize {
		return bufSize
	}
	return minBufSize
}
// recordFromIdx builds the Record at position i of the sort order: the key
// comes from the sorted slice and its value is looked up in the index map.
func (sm *SortedMap) recordFromIdx(i int) Record {
	rec := Record{}
	rec.Key = sm.sorted[i]
	rec.Val = sm.idx[rec.Key]
	return rec
}
// sendRecord tries to deliver record i on the iteration channel.
// It returns false — telling the caller to stop iterating — when the
// iteration was canceled via Close, or when sendTimeout is positive and
// elapses before a receiver takes the record.
func (sm *SortedMap) sendRecord(iterCh IterChCloser, sendTimeout time.Duration, i int) bool {
	// Non-positive timeout: block until the record is received or the
	// iteration is canceled.
	if sendTimeout <= time.Duration(0) {
		select {
		case <-iterCh.canceled:
			return false

		case iterCh.ch <- sm.recordFromIdx(i):
			return true
		}
	}

	// Positive timeout: same as above, but give up after sendTimeout.
	select {
	case <-iterCh.canceled:
		return false

	case iterCh.ch <- sm.recordFromIdx(i):
		return true

	case <-time.After(sendTimeout):
		return false
	}
}
// iterCh spawns a goroutine that streams the records between the optional
// bounds into a buffered channel wrapped in an IterChCloser.
//
// The goroutine exits when every record has been sent, when the IterChCloser
// is closed, or when a single send exceeds params.SendTimeout (if set); it
// always closes the records channel on exit so readers can range over it.
// An error is returned when the bounds select no values.
func (sm *SortedMap) iterCh(params IterChParams) (IterChCloser, error) {
	iterBounds := sm.boundsIdxSearch(params.LowerBound, params.UpperBound)
	if iterBounds == nil {
		return IterChCloser{}, errors.New(noValuesErr)
	}

	iterCh := IterChCloser{
		ch:       make(chan Record, setBufSize(params.BufSize)),
		canceled: make(chan struct{}),
	}

	go func(params IterChParams, iterCh IterChCloser) {
		if params.Reversed {
			for i := iterBounds[1]; i >= iterBounds[0]; i-- {
				if !sm.sendRecord(iterCh, params.SendTimeout, i) {
					break
				}
			}
		} else {
			for i := iterBounds[0]; i <= iterBounds[1]; i++ {
				if !sm.sendRecord(iterCh, params.SendTimeout, i) {
					break
				}
			}
		}
		close(iterCh.ch)
	}(params, iterCh)

	return iterCh, nil
}
// iterFunc walks the records between the optional bounds, invoking f on each
// record until the range is exhausted or f returns false.
// It returns an error when the bounds select no values.
func (sm *SortedMap) iterFunc(reversed bool, lowerBound, upperBound interface{}, f IterCallbackFunc) error {
	iterBounds := sm.boundsIdxSearch(lowerBound, upperBound)
	if iterBounds == nil {
		return errors.New(noValuesErr)
	}

	if reversed {
		for i := iterBounds[1]; i >= iterBounds[0]; i-- {
			if !f(sm.recordFromIdx(i)) {
				break
			}
		}
	} else {
		for i := iterBounds[0]; i <= iterBounds[1]; i++ {
			if !f(sm.recordFromIdx(i)) {
				break
			}
		}
	}

	return nil
}
// IterCh returns a channel that sorted records can be read from and processed.
// This method defaults to the expected behavior of blocking until a read, with no timeout.
func (sm *SortedMap) IterCh() (IterChCloser, error) {
	return sm.iterCh(IterChParams{})
}

// BoundedIterCh returns a channel that sorted records can be read from and processed.
// BoundedIterCh starts at the lower bound value and sends all values in the collection until reaching the upper bounds value.
// Sort order is reversed if the reversed argument is set to true.
// This method defaults to the expected behavior of blocking until a channel send completes, with no timeout.
func (sm *SortedMap) BoundedIterCh(reversed bool, lowerBound, upperBound interface{}) (IterChCloser, error) {
	return sm.iterCh(IterChParams{
		Reversed:   reversed,
		LowerBound: lowerBound,
		UpperBound: upperBound,
	})
}

// CustomIterCh returns a channel that sorted records can be read from and processed.
// Iteration order, bounds, channel buffering and the per-send timeout are all
// taken from params (see IterChParams); unset fields fall back to the same
// defaults as IterCh.
func (sm *SortedMap) CustomIterCh(params IterChParams) (IterChCloser, error) {
	return sm.iterCh(params)
}

// IterFunc passes each record to the specified callback function.
// Iteration stops early if the callback returns false.
// Sort order is reversed if the reversed argument is set to true.
func (sm *SortedMap) IterFunc(reversed bool, f IterCallbackFunc) {
	sm.iterFunc(reversed, nil, nil, f)
}

// BoundedIterFunc starts at the lower bound value and passes all values in the collection to the callback function until reaching the upper bounds value.
// Iteration stops early if the callback returns false.
// Sort order is reversed if the reversed argument is set to true.
func (sm *SortedMap) BoundedIterFunc(reversed bool, lowerBound, upperBound interface{}, f IterCallbackFunc) error {
	return sm.iterFunc(reversed, lowerBound, upperBound, f)
}
package csvparser
import (
"fmt"
"reflect"
"strconv"
)
// getTagNames collects the `csv` struct-tag value of every field of the
// slice's element type, in declaration order. Fields without a `csv` tag
// contribute an empty string (the Lookup ok-flag is deliberately ignored).
//
// NOTE(review): the element type is discovered by reflecting on s.Index(0),
// so calling this with an empty slice panics — callers must pass at least
// one element. Confirm callers guarantee this.
func getTagNames(i interface{}) []string {
	var tags []string
	s := reflect.ValueOf(i)
	e := s.Index(0)
	for i := 0; i < e.NumField(); i++ {
		tag := e.Type().Field(i).Tag
		tagName, _ := tag.Lookup("csv")
		tags = append(tags, tagName)
	}
	return tags
}
// getFieldValues converts every element of the struct slice i into a row of
// stringified field values, in field-declaration order.
//
// Supported field kinds are string, the signed and unsigned integer kinds,
// the float kinds and bool; any other kind yields an empty string.
func getFieldValues(i interface{}) [][]string {
	s := reflect.ValueOf(i)
	records := make([][]string, 0, s.Len())
	for i := 0; i < s.Len(); i++ {
		e := s.Index(i)
		raw := make([]string, 0, e.NumField())
		for j := 0; j < e.NumField(); j++ {
			var val string
			field := e.Field(j)
			switch field.Kind() {
			case reflect.String:
				val = field.String()
			case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
				// FormatInt instead of Itoa(int(n)): the old int conversion
				// truncated int64 values on 32-bit platforms.
				val = strconv.FormatInt(field.Int(), 10)
			case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
				// Previously unhandled: unsigned fields silently became "".
				val = strconv.FormatUint(field.Uint(), 10)
			case reflect.Float32:
				// bitSize 32 so float32 values round-trip exactly, without
				// the float64-conversion noise FormatFloat(..., 64) adds.
				val = strconv.FormatFloat(field.Float(), 'E', -1, 32)
			case reflect.Float64:
				val = strconv.FormatFloat(field.Float(), 'E', -1, 64)
			case reflect.Bool:
				val = strconv.FormatBool(field.Bool())
			}
			raw = append(raw, val)
		}
		records = append(records, raw)
	}
	return records
}
// GetFieldNameTypeAndValue prints the name, static type and current value of
// every field of the given struct, one field per line.
func GetFieldNameTypeAndValue(i interface{}) {
	v := reflect.ValueOf(i) //.Elem()
	t := v.Type()
	for idx := 0; idx < v.NumField(); idx++ {
		fmt.Printf("%v %v %v\n", t.Field(idx).Name, t.Field(idx).Type, v.Field(idx).Interface())
	}
}
// Marshal ...
func Marshal(i interface{}) [][]string {
data := make([][]string, 0)
tagNames := getTagNames(i)
data = append(data, tagNames)
records := getFieldValues(i)
data = append(data, records...)
return data
} | go | 16 | 0.608028 | 79 | 22.205479 | 73 | starcoderdata |
package CSP.Constraints;
import java.util.ArrayList;
/**
 * Unary CSP constraint forcing the variable at {@code index} to equal a
 * fixed {@code value}.
 *
 * NOTE(review): the ArrayList declarations appear to have lost their generic
 * parameters (likely {@code ArrayList<Integer>}) — e.g. {@code int val =
 * dom.get(i)} only compiles against a typed list. Confirm against the
 * UnaryConstraint interface before building.
 */
public class EqualToValueConstraint implements UnaryConstraint {

    private int index;          // position of the constrained variable
    private ArrayList solution; // shared (partial) assignment; null entry = unassigned
    private Integer value;      // the value the variable must take

    public EqualToValueConstraint(int index, Integer value){
        this.index = index;
        this.value = value;
    }

    /**
     * A still-unassigned variable satisfies the constraint trivially;
     * otherwise the assigned value must equal the target value.
     */
    @Override
    public boolean satisfied() {
        if(solution.get(index) == null) return true;
        return solution.get(index).equals(value);
    }

    @Override
    public void setResultArray(ArrayList result) {
        this.solution = result;
    }

    /**
     * AC-3 style domain filtering: removes every domain entry different from
     * {@code value}. Returns true iff the domain was changed.
     */
    @Override
    public boolean AC3Domain(ArrayList dom) {
        // Collect the positions to delete first...
        ArrayList domRm = new ArrayList<>(dom.size());
        for(int i = 0; i < dom.size(); i++){
            int val = dom.get(i);
            if(val != value) domRm.add(i);
        }

        if(domRm.size() == 0)
            return false;

        // ...then delete them back-to-front so earlier indices stay valid.
        for(int i = domRm.size() - 1; i >= 0; i--){
            int indexToRemove = domRm.get(i);
            dom.remove(indexToRemove);
        }
        return true;
    }

    @Override
    public int getIndex() {
        return index;
    }

    /** Value-equality constraints have nothing extra to simplify. */
    @Override
    public void simplifyDomains(Integer value, ArrayList domains) { };
}
/// <summary>
/// Picks the translation that matches the plural form of <paramref name="count"/>
/// for the given culture; a missing count selects the first form. When the
/// computed form has no entry in <paramref name="pluralForms"/>, a warning is
/// logged and the last available form is used.
/// </summary>
private string GetTranslation(string[] pluralForms, CultureInfo culture, int? count)
{
    var dictionary = _localizationManager.GetDictionary(culture);
    var form = count.HasValue ? dictionary.PluralRule(count.Value) : 0;

    if (form < pluralForms.Length)
    {
        return pluralForms[form];
    }

    if (_logger.IsEnabled(LogLevel.Warning))
    {
        _logger.LogWarning("Plural form '{PluralForm}' doesn't exist in values provided by the 'IStringLocalizer.Plural' method. Provided values: {PluralForms}", form, String.Join(", ", pluralForms));
    }

    // Fall back to the last form that was provided.
    return pluralForms[pluralForms.Length - 1];
}
def list2str(list_item):
    '''
    Convert ``list_item`` into a string.

    A string input is returned unchanged. Any other iterable of numbers is
    rendered as space-separated fixed-width floats ("{:18.10f}" per value),
    wrapped in a single leading and trailing space.

    Note: the previous implementation deep-copied the input first; the copy
    was pointless (the input is never mutated) and wasteful for large lists,
    so it was removed.
    '''
    if isinstance(list_item, str):
        return list_item
    return ' ' + ' '.join(["{:18.10f}".format(_) for _ in list_item]) + ' '
def get_texture_sequence(filename, tilewidth=32, tileheight=32, margin=1, spacing=1, nearest=False):
    """Returns a texture sequence of a grid generated from a tile set.

    :param filename: image resource name, loaded via ``pyglet.resource``.
    :param tilewidth: width in pixels of a single tile.
    :param tileheight: height in pixels of a single tile.
    :param margin: outer border (pixels) around the whole tile sheet.
    :param spacing: gap (pixels) between adjacent tiles.
    :param nearest: if True, use GL_NEAREST min/mag filtering (crisp pixels,
        no smoothing when scaled).
    """
    image = pyglet.resource.image(filename)
    # Crop the outer margin away; only the inter-tile spacing remains.
    region = image.get_region(margin, margin, image.width-margin*2, image.height-margin*2)
    # we've already thrown away the margins
    rows = calculate_columns(region.height, tileheight, margin=0, spacing=spacing)
    cols = calculate_columns(region.width, tilewidth, margin=0, spacing=spacing)
    grid = pyglet.image.ImageGrid(region,
                                  rows,
                                  cols,
                                  row_padding=spacing,
                                  column_padding=spacing,
                                  )
    texture = grid.get_texture_sequence()
    if nearest:
        gl.glTexParameteri(texture.target, gl.GL_TEXTURE_MIN_FILTER, gl.GL_NEAREST)
        gl.glTexParameteri(texture.target, gl.GL_TEXTURE_MAG_FILTER, gl.GL_NEAREST)
    return texture
void BoolArgument::parse(const string& usedName) {
    /* An omitted value counts as an affirmative answer. */
    if (result.size() == 0 || String::caseInsensitiveEquals(result, "yes") || result == "1") {
        boolResult = true;
        valid = true;
        return;
    }

    if (String::caseInsensitiveEquals(result, "no") || result == "0") {
        boolResult = false;
        valid = true;
        return;
    }

    /* Anything else is rejected; `valid` stays false. */
    errorMessage = String::format("Invalid command-line! expected \"yes\", \"no\", \"1\", or \"0\", after %s, got \"%s\"", usedName, result);
}
import Component from '../component/component';
import reflow from '../util/reflow';
import closest from '../util/closest';
import transitionend from '../util/transition-end-event';
import Tether from 'tether';
// DOM hooks: elements upgraded by this component / elements that dismiss it.
const SELECTOR_COMPONENT = '.aui-js-popover';
const SELECTOR_DISMISS = '[data-dismiss="popover"]';
// CSS class names toggled by the component.
const CLASS_ARROW = 'aui-popover__arrow';
const CLASS_ARROW_SHAPE = 'aui-popover__arrow-shape';
const CLASS_ACTIVE = 'is-active';
const CLASS_SHOWN = 'is-shown';
const CLASS_POPOVER_IS_OPEN = 'aui-popover-is-open';
// Width/height (px) of the diamond-shaped arrow SVG drawn by _addArrow.
const ARROW_SIZE = 20;
// Maps the `data-placement` attribute to a Tether attachment string
// (the point on the popover that gets pinned to the trigger).
const AttachmentMap = {
  top: 'bottom center',
  right: 'middle left',
  bottom: 'top center',
  left: 'middle right'
}
/**
 * Class constructor for Popover AUI component.
 * Implements AUI component design pattern defined at:
 * https://github.com/...
 *
 * A popover is positioned next to its trigger element (linked via the
 * popover's `for` attribute) using Tether, and is shown/hidden on trigger
 * clicks. Clicks outside the popover, or on a `[data-dismiss="popover"]`
 * element inside it, hide it again.
 *
 * @param {HTMLElement} element The element that will be upgraded.
 */
export default class Popover extends Component {

  /**
   * Upgrades all Popover AUI components.
   * @returns {Array} Returns an array of all newly upgraded components.
   */
  static upgradeElements() {
    let components = [];
    Array.from(document.querySelectorAll(SELECTOR_COMPONENT)).forEach(element => {
      if (!Component.isElementUpgraded(element)) {
        components.push(new Popover(element));
      }
    });
    return components;
  };

  /**
   * Initialize component
   */
  init() {
    super.init();

    this._body = document.querySelector('body');
    this._id = this._element.getAttribute('id');
    this._trigger = document.getElementById(this._element.getAttribute('for'));
    // The Tether instance is created lazily on the first trigger click
    // (see toggle()).
    this._tether = null;

    const placement = this._element.hasAttribute('data-placement') ? this._element.getAttribute('data-placement') : 'top';
    this._attachement = AttachmentMap[placement.toLowerCase()];

    // The arrow inherits the content's background color unless overridden
    // via `data-arrow-color`.
    const content = this._element.querySelector('.aui-popover__content');
    const arrowColor = this._element.hasAttribute('data-arrow-color') ? this._element.getAttribute('data-arrow-color') : window.getComputedStyle(content).backgroundColor;
    this._arrow = this._addArrow(content, arrowColor);

    if (this._trigger) {
      this._trigger.addEventListener('click', this._boundClickHandler = (event) => this.toggle(event));
    }
  }

  /**
   * Dispose component
   */
  dispose() {
    super.dispose();
    this.hide();
    // NOTE(review): `removeChild` is called on the component itself, but the
    // arrow was appended to the popover's content element in _addArrow —
    // presumably Component provides a removeChild helper; confirm, otherwise
    // this should be `this._arrow.parentNode.removeChild(this._arrow)`.
    this.removeChild(this._arrow);
    if (this._trigger) {
      this._trigger.removeEventListener('click', this._boundClickHandler);
    }
  }

  /**
   * Toggle show/hide Popover
   * @param {Event} event Click event of trigger element (optional)
   */
  toggle(event) {
    const performToggle = () => {
      if (!this._element.classList.contains(CLASS_ACTIVE) && this._tether) {
        this.show();
      } else {
        this.hide();
      }
    };

    if (event) {
      event.preventDefault();

      // First trigger click: create and position the Tether instance that
      // pins the popover to the clicked element.
      if (this._tether === null) {
        this._tether = new Tether({
          element: this._element,
          target: event.currentTarget,
          attachment: this._attachement,
          classPrefix: 'aui-tether',
          // NOTE We set an offset in CSS, because this offset wouln't be
          // flipped as it should:
          // https://github.com/HubSpot/tether/issues/106
          offset: '0 0',
          constraints: [{
            to: 'window',
            pin: ['left', 'right'],
            attachment: 'together'
          }],
          optimizations: {
            gpu: false
          }
        });
        reflow(this._element); // REVIEW Do we need this anymore?
        this._tether.position();
      }
      performToggle();

    } else {
      // Programmatic toggle: only works after a Tether instance exists.
      performToggle();
    }
  }

  /**
   * Show Popover
   */
  show() {
    this._body.classList.add(CLASS_POPOVER_IS_OPEN);
    this._element.classList.add(CLASS_ACTIVE);
    this._element.classList.add(CLASS_SHOWN);
    // Defer registering the outside-click handler so the click that opened
    // the popover (still bubbling) does not immediately close it again.
    setTimeout(() => {
      window.addEventListener('click', this._boundWindowClickHandler = (event) => this._onClickOutside(event));
    })
  }

  /**
   * Hide Popover
   */
  hide() {
    // CLASS_ACTIVE and the body class are removed in _onHideComplete, after
    // the hide transition has finished.
    this._element.classList.remove(CLASS_SHOWN);
    this._element.addEventListener(transitionend, this._boundAnimationendHandler = (event) => this._onHideComplete(event));
    window.removeEventListener('click', this._boundWindowClickHandler);
  }

  /**
   * Clean up Tether instance
   * @private
   */
  _cleanupTether() {
    if (this._tether) {
      this._tether.destroy();
      this._tether = null;
    }
    // Drop the inline positioning styles Tether applied.
    this._element.removeAttribute('style');
  }

  /**
   * Handle click of window.
   * @param {Event} event The event that fired.
   * @private
   */
  _onClickOutside(event) {
    // Hide if target dismisses Popover
    if (closest(event.target, SELECTOR_DISMISS, this._element)) {
      this.hide();
    // Hide if target is not inside Popover and is not a trigger element
    } else if (!this._element.contains(event.target) && event.target !== this._trigger) {
      this.hide();
    }
  }

  /**
   * Handle hide transition complete.
   * @param {Event} event The event that fired.
   * @private
   */
  _onHideComplete(event) {
    this._body.classList.remove(CLASS_POPOVER_IS_OPEN);
    this._element.classList.remove(CLASS_ACTIVE);
    this._element.removeEventListener(transitionend, this._boundAnimationendHandler);
    this._cleanupTether();
  }

  /**
   * Adds an arrow SVG element
   * <span class="…"><svg … ><path … />
   *
   * @param {HTMLElement} parent element to append arrow to.
   * @param {string} color used as value of fill property.
   * @returns {HTMLElement} the added arrow element.
   */
  _addArrow(parent, color) {
    const element = document.createElement('span');
    element.classList.add(CLASS_ARROW);
    const svg = this.createSvgNode('svg', {
      class: CLASS_ARROW_SHAPE,
      width: ARROW_SIZE,
      height: ARROW_SIZE,
      viewBox: `0 0 ${ARROW_SIZE} ${ARROW_SIZE}`
    });
    // Draw a diamond square ◆
    const path = this.createSvgNode('path', {
      d: `M${ARROW_SIZE / 2} 0 L ${ARROW_SIZE} ${ARROW_SIZE / 2} L ${ARROW_SIZE / 2} ${ARROW_SIZE} L 0 ${ARROW_SIZE / 2} Z`,
      fill: color
    });
    svg.appendChild(path);
    element.appendChild(svg);
    parent.appendChild(element);
    return element;
  }
}
/* NOTE(review): `align-content: right` is not a valid value (and
   align-content only affects multi-line flex/grid containers);
   `text-align: right` was likely intended — confirm. */
#aright
{
align-content: right;
}
/* Strip default list styling from the message menu. */
ul.msg
{
list-style-type:none;
margin:0;
padding:0;
}
/* Menu links rendered as uppercase block buttons. */
a:link.msg,a:visited.msg
{
display:block;
font-weight:bold;
color:black;
width:120px;
text-align:center;
padding:4px;
text-decoration:none;
text-transform:uppercase;
}
/* Hover/active highlight for menu links. */
a:hover.msg,a:active.msg
{
background-color:#337ab7;
}
/* Full-message links keep the default text color. */
a:link.full_msg
{
color:black;
}
a:link.full_msg
{
color:black;
}
<div class="container" style="border:2px solid #337ab7;">
<div class="row" >
<div class="col-md-2 col-xs-5">
<ul class="msg">
class="msg" href="<?php echo site_url('messages/message_home/'); ?>">Inbox<?php
//$_SESSION['newmail']=0;
if(isset($_SESSION['newmail_count']) && $_SESSION['newmail_count']!==0) //$_SESSION['newmail']>0) $newmail_count!==0
{
echo " color='black'>(".$_SESSION['newmail_count'].")
} ?>
class="msg" href="<?php echo site_url('messages/get_outbox/'); ?>">Sent Message
class="msg" href="<?php echo site_url('user/user_logout/') ?>">Logout
<div class="col-md-9 col-xs-5">
<?php
// loops through the message title, body and date
if($msg_row!==0)
{
foreach ($msg_row as $row_data):?>
<?php $message_id = $row_data['message_id']; ?>
<!-- creates a link for the full message -->
<a class="full_msg" href="<?php echo site_url('messages/full_message/'); echo $message_id; ?>">
echo $row_data['msg_title']; echo nbs(1)."-"; echo'<p class="text-muted" style="display:inline;">';
echo substr($row_data['msg_body'], 0, 76); echo nbs(2);
if(isset($row_data['recieve_date']))
{
echo $row_data['recieve_date']; echo"
}
else
{
echo $row_data['send_date']; echo"
}
?>
<?php endforeach; } // end of 'foreach' and the mother 'if' statement
else
{
echo " have no Message
}
?> | php | 12 | 0.591867 | 121 | 17.275229 | 109 | starcoderdata |
<?php
declare(strict_types=1);
namespace Andriichuk\Laracash;
use Andriichuk\Laracash\Concerns\CurrencyResolver;
use Andriichuk\Laracash\Concerns\Factory;
use Andriichuk\Laracash\Concerns\Formatter;
use Andriichuk\Laracash\Concerns\Parser;
/**
 * Class LaracashService
 *
 * Wires together the package's collaborators — currency resolution,
 * instance creation, formatting and parsing — and exposes each one through
 * a dedicated accessor.
 */
final class LaracashService
{
    /** @var CurrencyResolver */
    private $currencyResolver;

    /** @var Factory */
    private $factory;

    /** @var Formatter */
    private $formatter;

    /** @var Parser */
    private $parser;

    public function __construct(Config $config)
    {
        $resolver = new CurrencyResolver($config);

        $this->currencyResolver = $resolver;
        $this->factory = new Factory($resolver);
        $this->formatter = new Formatter($config);
        $this->parser = new Parser($config);
    }

    public function currency(): CurrencyResolver
    {
        return $this->currencyResolver;
    }

    public function factory(): Factory
    {
        return $this->factory;
    }

    public function formatter(): Formatter
    {
        return $this->formatter;
    }

    public function parser(): Parser
    {
        return $this->parser;
    }
}
<?php
namespace App\Http\Controllers\ADMIN;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use App\Models\InstaFeeds;
use Illuminate\Support\Facades\Redirect;
class InstafeedController extends Controller
{
    /** Render the "add InstaFeed" form with its breadcrumb. */
    public function index_instafeed()
    {
        $template['page_title'] = 'Add InstaFeed';
        $breadcrumb = [
            0 => [
                'title' => 'InstaFeeds',
                'link'  => route('show_instafeed'),
            ],
        ];
        $template['breadcrumb'] = $breadcrumb;
        return view("admin.insta.add", $template);
    }

    /**
     * Persist a new InstaFeed from the submitted form.
     *
     * NOTE(review): assumes the request always carries a valid `image`
     * upload (there is no validation) — confirm the form enforces it.
     */
    public function store_instafeed(Request $request)
    {
        $post = new InstaFeeds;
        $post->title = $request->get('title');
        $post->description = $request->get('description');

        // time()-based filenames can collide for uploads within one second.
        $imageName = time().'.'.$request->image->extension();
        $request->image->move(public_path('uploads/insta'), $imageName);
        $post->image = $imageName;

        $post->hyperlink = $request->get('hyperlink');
        $post->status = $request->get('status');
        $post->position = $request->get('position');
        $post->save();

        return Redirect::route('show_instafeed')->with('success','Created Successfully');
    }

    /** List all InstaFeeds. */
    public function show_instafeed(InstaFeeds $post)
    {
        $post = InstaFeeds::all();
        return view('admin.insta.insta', ['post' => $post, 'page_title' => 'Instafeeds']);
    }

    /** Render the edit form for one InstaFeed. */
    public function edit_instafeed(InstaFeeds $post, $id)
    {
        $post = InstaFeeds::find($id);
        return view('admin.insta.edit', ['post' => $post, 'page_title' => 'Edit Instafeeds']);
    }

    /**
     * Update an existing InstaFeed; the stored image is only replaced when a
     * new file was actually uploaded.
     */
    public function update_instafeed(Request $request, InstaFeeds $post, $id)
    {
        $post = InstaFeeds::find($id);
        $post->title = $request->get('title');
        $post->description = $request->get('description');

        // BUGFIX: check the upload through Laravel's request API instead of
        // the $_FILES superglobal. `$_FILES['image']['size'] > 0` raises an
        // "undefined index" notice when the field is absent and ignores
        // upload errors; hasFile() covers both cases.
        if ($request->hasFile('image')) {
            $imageName = time().'.'.$request->image->extension();
            $request->image->move(public_path('uploads/insta'), $imageName);
            $post->image = $imageName;
        }

        $post->hyperlink = $request->get('hyperlink');
        $post->status = $request->get('status');
        $post->position = $request->get('position');
        $post->save();

        return Redirect::route('show_instafeed')->with('success','Updated Successfully');
    }

    /** Delete an InstaFeed; a missing id is ignored (redirect either way). */
    public function destroy_instafeed(InstaFeeds $post, $id)
    {
        $post = InstaFeeds::find($id);
        // Guard against find() returning null for a stale id, which
        // previously caused a fatal "call to a member function on null".
        if ($post) {
            $post->delete();
        }
        return Redirect::route('show_instafeed')->with('success','Deleted Successfully');
    }
}
def find_end_of_reference_section(docbody,
                                  ref_start_line,
                                  ref_line_marker,
                                  ref_line_marker_ptn):
    """Given that the start of a document's reference section has already been
    recognised, this function is tasked with finding the line-number in the
    document of the last line of the reference section.
    @param docbody: (list) of strings - the entire plain-text document body.
    @param ref_start_line: (integer) - the index in docbody of the first line
     of the reference section.
    @param ref_line_marker: (string) - the line marker of the first reference
     line.
    @param ref_line_marker_ptn: (string) - the pattern used to search for a
     reference line marker.
    @return: (integer) - index in docbody of the last reference line
     -- OR --
            (None) - if ref_start_line was invalid.
    """
    section_ended = False
    x = ref_start_line
    if type(x) is not int or x < 0 or \
            x > len(docbody) or len(docbody) < 1:
        # The provided 'first line' of the reference section was invalid.
        # Either it was out of bounds in the document body, or it was not a
        # valid integer.
        # Can't safely find end of refs with this info - quit.
        return None
    # Get patterns for testing line:
    t_patterns = get_post_reference_section_title_patterns()
    kw_patterns = get_post_reference_section_keyword_patterns()

    if None not in (ref_line_marker, ref_line_marker_ptn):
        mk_patterns = [re.compile(ref_line_marker_ptn, re.I | re.UNICODE)]
    else:
        mk_patterns = get_reference_line_numeration_marker_patterns()

    # Scan forward from the reference-section start until a line looks like
    # post-reference content (section title, keyword, or a run of digits
    # that is probably a figure axis), or the document ends.
    current_reference_count = 0
    while x < len(docbody) and not section_ended:
        # save the reference count
        num_match = regex_match_list(docbody[x].strip(), mk_patterns)
        if num_match:
            try:
                current_reference_count = int(num_match.group('marknum'))
            except (ValueError, IndexError):
                # non numerical references marking
                pass
        # look for a likely section title that would follow a reference
        # section:
        end_match = regex_match_list(docbody[x].strip(), t_patterns)
        if not end_match:
            # didn't match a section title - try looking for keywords that
            # suggest the end of a reference section:
            end_match = regex_match_list(docbody[x].strip(), kw_patterns)
        else:
            # Is it really the end of the reference section? Check within the next
            # 5 lines for other reference numeration markers:
            y = x + 1
            line_found = False
            while y < x + 200 and y < len(docbody) and not line_found:
                num_match = regex_match_list(docbody[y].strip(), mk_patterns)
                if num_match and not num_match.group(0).isdigit():
                    try:
                        num = int(num_match.group('marknum'))
                        # Only a marker continuing the numeration sequence
                        # counts as proof the section goes on.
                        if current_reference_count + 1 == num:
                            line_found = True
                    except ValueError:
                        # We have the marknum index so it is
                        # numeric pattern for references like
                        # [1], [2] but this match is not a number
                        pass
                    except IndexError:
                        # We have a non numerical references marking
                        # we don't check for a number continuity
                        line_found = True
                y += 1
            if not line_found:
                # No ref line found-end section
                section_ended = True
        if not section_ended:
            # Does this & the next 5 lines simply contain numbers? If yes, it's
            # probably the axis scale of a graph in a fig. End refs section
            digit_test_str = docbody[x].replace(" ", "").\
                replace(".", "").\
                replace("-", "").\
                replace("+", "").\
                replace(u"\u00D7", "").\
                replace(u"\u2212", "").\
                strip()
            if len(digit_test_str) > 10 and digit_test_str.isdigit():
                # The line contains only digits and is longer than 10 chars:
                y = x + 1
                digit_lines = 4
                num_digit_lines = 1
                while y < x + digit_lines and y < len(docbody):
                    digit_test_str = docbody[y].replace(" ", "").\
                        replace(".", "").\
                        replace("-", "").\
                        replace("+", "").\
                        replace(u"\u00D7", "").\
                        replace(u"\u2212", "").\
                        strip()
                    if len(digit_test_str) > 10 and digit_test_str.isdigit():
                        num_digit_lines += 1
                    elif len(digit_test_str) == 0:
                        # This is a blank line. Don't count it, to accommodate
                        # documents that are double-line spaced:
                        digit_lines += 1
                    y = y + 1
                if num_digit_lines == digit_lines:
                    section_ended = True
        x += 1
    # The loop advanced x one past the last reference line.
    return x - 1
from itertools import groupby
import heapq
X=input()
N=len(X)
X=[X[i] for i in range(0,N)]
# Run-length encode the input: data holds (position, char, run_length)
# tuples, one per maximal run (assumes X contains only 'S' and 'T' —
# TODO confirm against the problem statement).
X=groupby(X)
data=[]
i=0
for key,group in X:
    g=len(list(group))
    data.append((i,key,g))
    i+=1
# Drop an unmatchable leading 'T' run and trailing 'S' run. BUGFIX: guard
# against single-run inputs (e.g. "TTT"), where the first trim empties the
# list and the unguarded data[-1] raised IndexError.
if data and data[0][1]=='T':
    data=data[1:]
if data and data[-1][1]=='S':
    data=data[:len(data)-1]
# After trimming, data alternates S,T,S,T,... in index order, so the heap
# pops consecutive (S-run, T-run) pairs.
heapq.heapify(data)
left=0   # surplus of unmatched 'S' carried into the next pair
ans=0    # number of S/T pairs matched (each removes one 'S' and one 'T')
while data:
    S=heapq.heappop(data)
    T=heapq.heappop(data)
    s=S[2]+left
    t=T[2]
    if s>=t:
        left=s-t
        ans+=t
    else:
        left=0
        ans+=s
print(N-2*ans)
# Author:
# This is unicode interface to the platform module.
from BitTorrent import platform
from BTL.platform import efs2
#get_filesystem_encoding = platform.get_filesystem_encoding
#encode_for_filesystem = platform.encode_for_filesystem
#decode_from_filesystem = platform.decode_from_filesystem
#set_config_dir = platform.set_config_dir
# Thin re-exports of BitTorrent.platform helpers under this module's name.
# NOTE(review): several unicode-wrapper aliases above (e.g.
# decode_from_filesystem) are commented out — any code that still refers to
# those bare names will raise NameError; confirm whether disabling them was
# intentional.
calc_unix_dirs = platform.calc_unix_dirs
get_free_space = platform.get_free_space
get_sparse_files_support = platform.get_sparse_files_support
is_path_too_long = platform.is_path_too_long
is_sparse = platform.is_sparse
get_allocated_regions = platform.get_allocated_regions
get_max_filesize = platform.get_max_filesize
create_shortcut = platform.create_shortcut
remove_shortcut = platform.remove_shortcut
enforce_shortcut = platform.enforce_shortcut
enforce_association = platform.enforce_association
btspawn = platform.btspawn
spawn = platform.spawn
#get_language = platform.get_language
smart_gettext_and_install = platform.smart_gettext_and_install
#read_language_file = platform.read_language_file
#write_language_file = platform.write_language_file
#install_translation = platform.install_translation
write_pid_file = platform.write_pid_file
#old_open = open
#def open(name, mode='r'):
# return old_open(efs2(name), mode)
#
#
#def language_path():
# return decode_from_filesystem(platform.language_path())
#
#def get_dir_root(shellvars, default_to_home=True):
# return decode_from_filesystem(
# platform.get_dir_root(shellvars, default_to_home))
def get_temp_dir():
    # BUGFIX: qualify via the platform module — the module-level alias
    # `decode_from_filesystem` is commented out above, so the bare name is
    # undefined at call time (NameError).
    return platform.decode_from_filesystem(platform.get_temp_dir())

def get_temp_subdir():
    # See get_temp_dir: the bare decode_from_filesystem alias is disabled.
    return platform.decode_from_filesystem(platform.get_temp_subdir())
#def get_config_dir():
# return decode_from_filesystem(platform.get_config_dir())
#
#def get_old_dot_dir():
# return decode_from_filesystem(platform.get_old_dot_dir())
#
#def get_dot_dir():
# return decode_from_filesystem(platform.get_dot_dir())
#
#def get_cache_dir():
# return decode_from_filesystem(platform.get_cache_dir())
# BUGFIX for all wrappers below: call decode_from_filesystem through the
# platform module — the module-level alias of the same name is commented out
# above, so the bare name raised NameError whenever these were called.

def get_home_dir():
    return platform.decode_from_filesystem(platform.get_home_dir())

def get_local_data_dir():
    return platform.decode_from_filesystem(platform.get_local_data_dir())

def get_old_incomplete_data_dir():
    return platform.decode_from_filesystem(platform.get_old_incomplete_data_dir())

def get_incomplete_data_dir():
    return platform.decode_from_filesystem(platform.get_incomplete_data_dir())

def get_save_dir():
    return platform.decode_from_filesystem(platform.get_save_dir())

def get_shell_dir(value):
    return platform.decode_from_filesystem(platform.get_shell_dir(value))

def get_startup_dir():
    return platform.decode_from_filesystem(platform.get_startup_dir())
from django.db import models
from emp_main.models import Datapoint
from .apps import app_url_prefix
class DemoAppPage(models.Model):
    """
    A simple example for model based pages for an EMP UI app.

    Each row describes one page: its navbar label, URL slug, free-form
    content, a background color, an optional Datapoint and a flag that
    enables the datapoint detail view.
    """
    page_name = models.CharField(
        max_length=18,
        help_text=(
            "The name of the page as displayed in the nav bar. Should not "
            "exceed 18 chars, as the string will be wider then the available "
            "space in the navbar."
        )
    )
    page_slug = models.SlugField(
        unique=True,
        help_text=(
            "The name of the page used in the URL of it. Must be unique "
            "as two pages of this app cannot have the same url."
        )
    )
    page_content = models.TextField(
        help_text=(
            "This is an example for some Content of the page that can be "
            "configured dynamically, i.e. by using Django's admin."
        )
    )
    # (value stored in DB, human-readable label) pairs for the color field.
    # NOTE(review): "BACKGROUD" is a typo, but renaming the attribute/field
    # would require a schema migration — left as-is.
    PAGE_BACKGROUD_COLOR_CHOICES = [
        ("transparent", "transparent"),
        ("yellow", "yellow"),
        ("red", "red"),
    ]
    page_background_color = models.CharField(
        max_length=11,
        choices=PAGE_BACKGROUD_COLOR_CHOICES,
        default=PAGE_BACKGROUD_COLOR_CHOICES[0][0],
        help_text=(
            "Allows configuring the background color of the page."
        )
    )
    demo_datapoint = models.ForeignKey(
        Datapoint,
        on_delete=models.CASCADE,
        null=True,
        blank=True,
        help_text=(
            "A simple example how to use a Datapoint in a model."
        ),
    )
    page_has_detail = models.BooleanField(
        default=False,
        help_text=(
            "Page displays datapoint detail page if True."
        ),
    )

    def get_absolute_url(self):
        # URL layout: /<app prefix>/<page slug>/ (see this app's urlconf).
        u = "/" + app_url_prefix + "/" + self.page_slug + "/"
        return u
const { Listr } = require('listr2');
const { flags: flagTypes } = require('@oclif/command');
const BaseCommand = require('../oclif/command/BaseCommand');
const MuteOneLineError = require('../oclif/errors/MuteOneLineError');
class ResetCommand extends BaseCommand {
  /**
   * Reset node data: stop services, remove containers/volumes (all of them,
   * or only the platform ones), optionally reset the config itself, and
   * re-initialize Tenderdash for soft resets.
   *
   * @param {Object} args
   * @param {Object} flags
   * @param {resetSystemConfig} resetSystemConfig
   * @param {isSystemConfig} isSystemConfig
   * @param {Config} config
   * @param {ConfigCollection} configCollection
   * @param {DockerCompose} dockerCompose
   * @param {Docker} docker
   * @param {tenderdashInitTask} tenderdashInitTask
   * @return {Promise<void>}
   */
  async runWithDependencies(
    args,
    {
      verbose: isVerbose,
      hard: isHardReset,
      'platform-only': isPlatformOnlyReset,
    },
    resetSystemConfig,
    isSystemConfig,
    config,
    configCollection,
    dockerCompose,
    docker,
    tenderdashInitTask,
  ) {
    // Hard resets restore a known system config; user-defined configs have
    // no baseline to restore to.
    if (isHardReset && !isSystemConfig(config.getName())) {
      throw new Error(`Cannot hard reset non-system config "${config.getName()}"`);
    }

    const tasks = new Listr([
      {
        title: 'Stop services',
        task: async () => dockerCompose.stop(config.toEnvs()),
      },
      {
        title: 'Remove all services and associated data',
        enabled: () => !isPlatformOnlyReset,
        task: async () => dockerCompose.down(config.toEnvs()),
      },
      {
        title: 'Remove platform services and associated data',
        enabled: () => isPlatformOnlyReset,
        task: async () => {
          // Remove containers, keeping the core ones.
          const coreContainerNames = ['core', 'sentinel'];
          const containerNames = await dockerCompose
            .getContainersList(config.toEnvs(), undefined, true);
          const platformContainerNames = containerNames
            .filter((containerName) => !coreContainerNames.includes(containerName));

          await dockerCompose.rm(config.toEnvs(), platformContainerNames);

          // Remove volumes, keeping the core ones.
          const coreVolumeNames = ['core_data'];
          const { COMPOSE_PROJECT_NAME: composeProjectName } = config.toEnvs();

          const projectVolumeNames = await dockerCompose.getVolumeNames(config.toEnvs());

          // BUGFIX: `forEach` returns undefined, so the previous code awaited
          // nothing and each async callback's removal promise was dropped —
          // the task could finish before the volumes were removed and
          // failures were silently swallowed. Collect and await them all.
          await Promise.all(
            projectVolumeNames
              .filter((volumeName) => !coreVolumeNames.includes(volumeName))
              .map((volumeName) => `${composeProjectName}_${volumeName}`)
              .map((volumeName) => docker.getVolume(volumeName).remove()),
          );
        },
      },
      {
        title: `Reset config ${config.getName()}`,
        enabled: () => isHardReset,
        task: () => resetSystemConfig(configCollection, config.getName(), isPlatformOnlyReset),
      },
      {
        title: 'Initialize Tenderdash',
        enabled: () => !isHardReset,
        task: () => tenderdashInitTask(config),
      },
    ],
    {
      renderer: isVerbose ? 'verbose' : 'default',
      rendererOptions: {
        clearOutput: false,
        collapse: false,
        showSubtasks: true,
      },
    });

    try {
      await tasks.run();
    } catch (e) {
      throw new MuteOneLineError(e);
    }
  }
}
ResetCommand.description = `Reset node data
Reset node data
`;
// CLI flags: `--hard` wipes the config too; `--platform-only` keeps core data.
ResetCommand.flags = {
  ...BaseCommand.flags,
  hard: flagTypes.boolean({ char: 'h', description: 'reset config as well as data', default: false }),
  'platform-only': flagTypes.boolean({ char: 'p', description: 'reset platform data only', default: false }),
};
module.exports = ResetCommand;
from easygraphics import *
import time
init_graph(640, 480)  # open a 640x480 drawing window
set_color(Color.BLUE);
set_fill_color(Color.GREEN);
# Manual render mode: the window only refreshes when the program asks for it
# (here, paced by delay_jfps in the loop below).
set_render_mode(RenderMode.RENDER_MANUAL)
x = 0;
# Animate a circle sweeping left-to-right, wrapping at x == 440.
while is_run():
    x = (x + 1) % 440;
    # delay_jfps(60, 0) appears to pace the loop at ~60 logical frames per
    # second and return truthy when a frame should be rendered — see the
    # easygraphics docs to confirm the second argument's meaning.
    if delay_jfps(60, 0):
        clear_device()
        draw_ellipse(x + 100, 200, 100, 100)
time.sleep(0.5)
close_graph()
/**
 * Awaits an array of promises one after another, collecting each fulfilled
 * value into `values` in order. A non-array argument resolves to [].
 *
 * Implementation notes:
 * - A sentinel `Promise.resolve()` is pushed so the reduce's final step still
 *   collects the last real promise's value; `values.slice(1)` then drops the
 *   undefined produced by the reduce seed.
 * - Rejections are only logged: the rejected promise contributes no entry, so
 *   the result array can be shorter than the input (see the commented-out
 *   `values.push(err)`).
 * - NOTE(review): the `Array.isArray(curr)` branches look unreachable when
 *   callers pass a flat array of promises — confirm whether nested-array
 *   input was ever intended.
 */
function tube(funcs) {
  const values = [];
  if (!Array.isArray(funcs)) return Promise.resolve(values);
  funcs.push(Promise.resolve());
  return funcs.reduce((prev, curr) => {
    return prev
      .then(v => {
        logger.debug("v => %j", v);
        values.push(v);
        if (typeof curr.then === 'function') return curr;
        if (Array.isArray(curr)) return Promise.all(funcs);
        throw new Error('invalid promise functions!');
      })
      .catch(err => {
        logger.debug("err => %j", err.message);
        // values.push(err);
        if (typeof curr.then === 'function') return curr;
        if (Array.isArray(curr)) return Promise.all(funcs);
      });
  }, Promise.resolve())
    .then(() => values.slice(1));
}
// Demo: run the block operations in sequence and dump the collected values.
// NOTE(review): writeBlock is invoked twice with identical arguments —
// presumably deliberate (idempotence/overwrite check); confirm.
tube([
    blockHandler.writeBlock(block.blockNumber, block.blockInfo),
    blockHandler.writeBlock(block.blockNumber, block.blockInfo),
    blockHandler.updateBlockStatus(block.blockNumber, "finished"),
    blockHandler.readBlock(block.blockNumber),
])
    .then(lalala => {
        logger.debug("%j", lalala);
    });
    def handle_request(self, req, sock, addr):
        """Serve one parsed HTTP request over `sock` (Python 2 code).

        Builds the WSGI environ, runs the application, streams the response
        and performs keep-alive / max-request bookkeeping.

        Returns True to keep the connection alive, False to close it;
        StopIteration propagates to tell the caller to stop serving.
        """
        try:
            debug = self.cfg.debug or False
            # Hook: let the config observe the request before handling.
            self.cfg.pre_request(self, req)
            resp, environ = wsgi.create(req, sock, addr, self.address, self.cfg)
            self.nr += 1
            # Recycle the worker once it has served its request quota.
            if self.alive and self.nr >= self.max_requests:
                self.log.info("Autorestarting worker after current request.")
                resp.force_close()
                self.alive = False
            respiter = self.wsgi(environ, resp.start_response)
            # Sentinel: the app already wrote the response itself.
            if respiter == ALREADY_HANDLED:
                return False
            for item in respiter:
                resp.write(item)
            resp.close()
            if hasattr(respiter, "close"):
                respiter.close()
            # Non-keep-alive request: signal the caller to stop the loop.
            if req.should_close():
                raise StopIteration()
            self.cfg.post_request(self, req)
        except StopIteration:
            raise
        except Exception, e:
            # Only send back traceback in HTTP in debug mode.
            # NOTE(review): the local `debug` computed above is never used;
            # this checks self.debug instead — confirm which flag is
            # authoritative.
            if not self.debug:
                raise
            util.write_error(sock, traceback.format_exc())
            return False
        return True
    /**
     * Verifies that PUT /api/process-form-configs updates an existing entity:
     * persists one, mutates every updatable field, performs the request and
     * asserts both the HTTP status and the persisted state.
     */
    @Test
    @Transactional
    public void updateProcessFormConfig() throws Exception {
        // Initialize the database
        processFormConfigRepository.saveAndFlush(processFormConfig);

        int databaseSizeBeforeUpdate = processFormConfigRepository.findAll().size();

        // Update the processFormConfig
        ProcessFormConfig updatedProcessFormConfig = processFormConfigRepository.findById(processFormConfig.getId()).get();
        // Disconnect from session so that the updates on updatedProcessFormConfig are not directly saved in db
        em.detach(updatedProcessFormConfig);
        updatedProcessFormConfig
            .processDefinitionKey(UPDATED_PROCESS_DEFINITION_KEY)
            .taskNodeId(UPDATED_TASK_NODE_ID)
            .taskNodeName(UPDATED_TASK_NODE_NAME)
            .commonTableId(UPDATED_COMMON_TABLE_ID)
            .formData(UPDATED_FORM_DATA);
        ProcessFormConfigDTO processFormConfigDTO = processFormConfigMapper.toDto(updatedProcessFormConfig);

        restProcessFormConfigMockMvc.perform(put("/api/process-form-configs")
            .contentType(TestUtil.APPLICATION_JSON)
            .content(TestUtil.convertObjectToJsonBytes(processFormConfigDTO)))
            .andExpect(status().isOk());

        // Validate the ProcessFormConfig in the database
        List<ProcessFormConfig> processFormConfigList = processFormConfigRepository.findAll();
        assertThat(processFormConfigList).hasSize(databaseSizeBeforeUpdate);
        // The updated row is expected to be the last one returned.
        ProcessFormConfig testProcessFormConfig = processFormConfigList.get(processFormConfigList.size() - 1);
        assertThat(testProcessFormConfig.getProcessDefinitionKey()).isEqualTo(UPDATED_PROCESS_DEFINITION_KEY);
        assertThat(testProcessFormConfig.getTaskNodeId()).isEqualTo(UPDATED_TASK_NODE_ID);
        assertThat(testProcessFormConfig.getTaskNodeName()).isEqualTo(UPDATED_TASK_NODE_NAME);
        assertThat(testProcessFormConfig.getCommonTableId()).isEqualTo(UPDATED_COMMON_TABLE_ID);
        assertThat(testProcessFormConfig.getFormData()).isEqualTo(UPDATED_FORM_DATA);
    }
    def test_usb_device(self):
        """Demo instantiation from an existing UsbDevice.

        Resolves the FTDI URL into a (device descriptor, interface) pair,
        builds a SpiController on that device and obtains a serial-flash
        object from chip-select 0 of the SPI bus.
        """
        candidate = Ftdi.get_identifiers(self.ftdi_url)
        usbdev = UsbTools.get_device(candidate[0])
        spi = SpiController(cs_count=1)
        # candidate[1] is the FTDI interface number for this URL.
        spi.configure(usbdev, interface=candidate[1])
        flash = SerialFlashManager.get_from_controller(spi, cs=0,
                                                       freq=self.frequency)
    /**
     * Documentation snippet: demonstrates scoping a temporary time-zone
     * override with try-with-resources. The begin/end-snippet markers are
     * consumed by the docs tooling — do not alter them.
     */
    @Test
    public void testWithTimeZoneExample()
    {
        // begin-snippet: with_time_zone
        try (WithTimeZone tz = new WithTimeZone("UTC"))
        {
            // All code within this block will see the computer as being in the UTC time zone
        }
        // The computer's time zone will revert to previous setting here
        // end-snippet
    }
import React from 'react'
import Layout from '../../components/layout'
import { ERROR_MESSAGE } from '../../utils'
import SEO from '../../components/seo'
import Hero from '../../components/OverMij/Hero'
import Body from '../../components/OverMij/Body'
const OverMij = (props) => {
const {
pageContext: {
page,
page: { internalName, hero, sections },
},
} = props
return (
<SEO title={internalName} />
{props.pageContext.page ? (
<>
{hero && <Hero page={page} />}
<Body sections={sections} />
) : (
)}
)
}
export default OverMij | javascript | 17 | 0.550365 | 48 | 21.096774 | 31 | starcoderdata |
/*
 * Duck-typed authentication: anything exposing an `authenticate(password)`
 * function counts as "authenticable" — no base class required.
 */
export class AuthenticationSystem {
    /** Returns the result of `autenticable.authenticate(password)`, or false
     *  when the object is not authenticable. */
    static login(autenticable, password) {
        if (!AuthenticationSystem.isAthenticable(autenticable)) {
            return false;
        }
        return autenticable.authenticate(password);
    }

    /** Duck-type check: the object must carry an `authenticate` Function. */
    static isAthenticable(autenticable) {
        const candidate = autenticable.authenticate;
        return "authenticate" in autenticable && candidate instanceof Function;
    }
}
# Copyright 2021 All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
""" Representation Probing """
from typing import Optional, Tuple, Union
from absl import app
from absl import logging
from datasets import DatasetDict
from datasets import load_from_disk
from einops import rearrange
from einops import repeat
import jax
import jax.numpy as jnp
import jax.random as jr
import numpy as np
import pandas as pd
from pandas import DataFrame
import toolz.curried as T
from tqdm import trange
import tree
from probing._src.configurable import configurable
from probing._src.constants import COLORS
from probing.representations import data
from probing.representations import models
@configurable
def repr_probing( # pytype: disable=annotation-type-mismatch
    repr_ds: Optional[str] = None,
    preds_path: Optional[str] = None,
    results_path: Optional[str] = None,
    seed: int = 12345,
    nb_seeds: int = 5,
    nb_points: int = 10,
    batch_size: int = 64,
    n_training_steps: int = 4000,
    max_parallel: int = -1,
    log_freq: int = 0,
    max_batch_size: int = 1024,
    ds_fits_in_vram: bool = True,
    learning_rate: float = 1e-4,
    hidden_sizes: Tuple[int] = (512, 512),
    validation_split: str = 'validation',
) -> Tuple[DataFrame, DataFrame]:
  """Run representation probing.

  Depending on the representation size, we may need to do jobs in smaller
  batches.

  Args:
    seed: Random seed
    nb_seeds: Number of random seeds per point
    nb_points: Number of point to run along the curve
    batch_size: Batch size for each model.
    n_training_steps: Number of training steps.
    max_parallel: Maximum number of models that can be trained in parallel.
    log_freq: Logging frequency
    max_batch_size: Maximum batch size to use during evaluation.
    learning_rate: Learning rate
    hidden_sizes: Size of each hidden layer.
    repr_ds: Directory containing a hf dataset with representations, or a
      preloaded DatasetDict.
    preds_path: path to store predictions
    results_path: path to store results in
    ds_fits_in_vram: predicate indicating if the dataset fits in VRAM. This
      should only be set as a last resort, max_parallel is much faster.
    validation_split: split to use for calculating validation metrics. this
      should be `validation` or `test`.

  Returns:
    Tuple of (per-example predictions, per-job results) data frames.
  """
  if not isinstance(repr_ds, DatasetDict):
    repr_ds = load_from_disk(repr_ds)

  if validation_split == 'train':
    raise ValueError(
        'validation split cannot be train, choose one of "validation" or "test".'
    )
  if validation_split == "test":
    logging.warning('received validation_split="test".')

  jobs = data.generate_jobs(
      repr_ds['train'],
      nb_seeds=nb_seeds,
      nb_points=nb_points,
      seed=seed,
  )

  # configure chex compile assertions. The divisibility warning (and second
  # expected trace) only applies when jobs are actually partitioned; guarding
  # on max_parallel > 0 also avoids ZeroDivisionError for max_parallel == 0.
  chex_expect_num_compile = 1
  if max_parallel > 0 and len(jobs) % max_parallel != 0:
    logging.warning(
        'the # of jobs (%d) should be divisible by max_parallel (%d), otherwise'
        'jax will have to recompile every step for the last set of models.',
        len(jobs), max_parallel)
    chex_expect_num_compile = 2

  # BUGFIX: honour the requested validation split. Previously this value was
  # unconditionally overwritten with repr_ds['test'] further down, silently
  # defeating the validation_split parameter (and its checks above).
  val_ds = repr_ds[validation_split]

  # Create RNGs: one (step, init, data) triple per model so every model is
  # initialised and shuffled independently.
  n_models = len(jobs)
  rng = jr.PRNGKey(seed)
  rngs = jr.split(rng, n_models)
  rngs, init_rngs, data_rngs = zip(*[jr.split(rng, 3) for rng in rngs])

  train_ds = repr_ds['train']

  # build models.
  # Depending on the representation size, we may need to do jobs in smaller
  # batches, however, we will maintain the same functions throughout.
  # only the parameter sets need get reset.
  input_shape = np.shape(train_ds[0]['hidden_states'])
  n_classes = len(train_ds[0]['label'])
  init_fn, update_fn, metrics_fn = models.build_models(
      input_shape,
      hidden_sizes,
      batch_size=batch_size,
      n_classes=n_classes,
      learning_rate=learning_rate)

  # create train iter
  train_iter = data.jax_multi_iterator(
      train_ds,
      batch_size,
      ds_fits_in_vram=ds_fits_in_vram,
      max_traces=chex_expect_num_compile,
  )

  # add vmaps: train all models of a jobset in parallel.
  update_fn = jax.vmap(update_fn)
  # validation function uses the same data for all models.
  valid_fn = jax.vmap(metrics_fn, in_axes=(0, None))
  evaluate = models.evaluate(valid_fn, val_ds, max_batch_size)

  # Create inner loop --->
  inner_loop = _repr_curve_inner(train_iter, init_fn, update_fn, evaluate,
                                 log_freq, n_training_steps)

  # zip up the rngs into jobs and partition s.t. < max_parallel
  inner_jobs = list(zip(jobs, rngs, init_rngs, data_rngs))
  if max_parallel > 0:
    inner_jobs = T.partition_all(max_parallel, inner_jobs)
  else:
    inner_jobs = [inner_jobs]

  records, preds = zip(*T.map(inner_loop, inner_jobs))
  df = _format_predictions(val_ds, preds, jobs)

  # store results
  results = _generate_results(records)
  df_result = pd.DataFrame.from_records(results)

  # maybe save to files
  if preds_path:
    df.to_csv(preds_path, index=False)
  if results_path:
    df_result.to_csv(results_path, index=False)
  return df, df_result
@T.curry
def _repr_curve_inner(
    train_iter,
    init_fn,
    update_fn,
    evaluate,
    log_freq,
    n_training_steps,
    jobset,
):
  """Train one jobset of probes in parallel and evaluate them.

  Args:
    train_iter: factory mapping a tuple of jobs to a batched data iterator.
    init_fn: initializer producing (params, opt_state) for all models.
    update_fn: vmapped single-step update function.
    evaluate: callable params -> per-example validation metrics.
    log_freq: progress-bar metric logging frequency (0 disables).
    n_training_steps: number of optimisation steps.
    jobset: iterable of (job, rng, init_rng, data_rng) tuples.

  Returns:
    (results, val_metrics): per-job result dicts and the raw per-example
    validation metrics.
  """
  # unpack the jobset
  # NOTE(review): data_rngs is unpacked but never used here — confirm.
  jobs, rngs, init_rngs, data_rngs = zip(*jobset)
  # initialize
  params, opt_state = init_fn(init_rngs)
  train_iter = train_iter(jobs)
  rngs = jnp.asarray(rngs)
  # log_freq = 400
  # batch: <num_models, batch_size, *input_shape>
  train_pbar = trange(int(n_training_steps), desc='Training')
  for step in train_pbar:
    batch = next(train_iter)
    params, opt_state, up_metrics, rngs = update_fn(params, rngs, opt_state,
                                                    batch)
    # Periodically surface training/validation stats on the progress bar.
    if log_freq > 0 and (step + 1) % log_freq == 0:
      val_metrics = evaluate(params)
      train_pbar.set_postfix(
          avg_loss=jnp.mean(up_metrics['loss']),
          # best average jsd.
          val_jsd=jnp.min(jnp.mean(val_metrics['jensenshannon_div'], axis=-1)),
      )

  # validation
  val_metrics = evaluate(params)
  # avg across examples -> one scalar per (job, metric)
  per_job_val_metrics = tree.map_structure(lambda x: jnp.mean(x, axis=1),
                                           val_metrics)
  results = _metrics_to_results(per_job_val_metrics, jobs, n_training_steps)
  results = list(results)
  return results, val_metrics
def _generate_results(records):
"""" Generates and saves results. """
results = [r for resset in records for r in resset]
return results
def _metrics_to_results(metrics, jobs, t):
  """Turn stacked per-job metrics into an iterator of per-job result dicts.

  Each result carries the job's slice of every metric leaf plus job
  metadata (seed / objects / samples) and the training-step count `t`.
  """

  def make_result(item):
    job_index, job = item
    # Slice this job's entry out of every metric leaf.
    result = tree.map_structure(lambda leaf: leaf[job_index], metrics)
    result['seed'] = job['seed']
    result['objects'] = job['num_objects']
    result['samples'] = job['samples']
    result['t'] = t
    return result

  return T.map(make_result, enumerate(jobs))
def _format_predictions(ds, results, jobs) -> pd.DataFrame:
  """Flatten per-job validation outputs into a tidy predictions DataFrame.

  Each row corresponds to one (job, example) pair, with per-color
  prediction columns plus example metadata (class_id, template_idx) and
  job metadata (samples, objects, seed).

  Args:
      ds: Dataset the predictions were made on.
      results: Per-jobset prediction/metric structures to concatenate.
      jobs: Job descriptors (dicts with 'samples', 'num_objects', 'seed').

  Returns:
      A flat pandas DataFrame with n_jobs * len(ds) rows.
  """
  # chex.assert_equal_shape([ds.labels, preds[0]])
  # we may have done the work in jobsets, stack preds
  # preds = jnp.concatenate(preds)
  # Concatenate each leaf across jobsets -> leading dim = total n_jobs.
  results = tree.map_structure(lambda *x: jnp.concatenate(x), *results)
  # shapes = tree.map_structure(lambda x: x.shape, results)
  # Broadcast per-example metadata across all jobs.
  results['class_id'] = repeat(np.asarray(ds['class_id']),
                               'num_ex -> n num_ex',
                               n=len(jobs))
  results['template_idx'] = repeat(np.asarray(ds['template_idx']),
                                   'num_ex -> n num_ex',
                                   n=len(jobs))

  # add job metadata (broadcast per-job values across all examples)
  for k in ('samples', 'num_objects', 'seed'):
    results[k] = repeat(np.asarray([x[k] for x in jobs]),
                        'n -> n num_ex',
                        num_ex=len(ds))

  # add colors: split the prediction tensor into one column per color.
  results['objects'] = results.pop('num_objects')
  preds = results.pop('preds')
  for i, color in enumerate(COLORS):
    results[color] = preds[:, :, i]

  # should now be all
  # ic(tree.map_structure(lambda x: x.shape, results))
  # flatten all (job, example) pairs into a single axis
  results = tree.map_structure(
      lambda x: rearrange(x, 'n_jobs nex -> (n_jobs nex)'), results)

  # -> csv
  df = pd.DataFrame(results)
  return df
def main(_):
  # absl.app entry point: run probing with flag-configured arguments.
  repr_probing()


if __name__ == '__main__':
  app.run(main)
// Writes every registered info source into one JSON map response:
// { "<source name>": { ...source info... }, ... }.
Result InfoService::HandleGetAllInfoSources(
    IURIRequestContext* pContext)
{
    IStructuredWriter* pWriter = nullptr;
    Result result = pContext->BeginJsonResponse(&pWriter);
    if (result != Result::Success)
    {
        // Could not start the JSON response; report the failure as-is.
        return result;
    }

    // Open the top-level map keyed by source name.
    pWriter->BeginMap();

    // Hold the registration lock while iterating the source table; it is
    // released when this function returns (after the response is finalized).
    Platform::LockGuard<Platform::Mutex> infoSourcesLock(m_infoSourceMutex);
    for (const auto& sourceEntry : m_registeredInfoSources)
    {
        // Source name is the key; the source's info map is the value.
        pWriter->Key(sourceEntry.value.name.AsCStr());
        WriteInfoSource(sourceEntry.value, pWriter);
    }

    pWriter->EndMap();

    // Finalize the response and return the overall status.
    return pWriter->End();
}
#!/usr/bin/env python3
import os
import sys
def part1(list):
    """Count entries whose policy character occurs between min and max times.

    Each entry is a ('<lo>-<hi> <char>', password) pair.
    """
    valid_count = 0
    for policy, password in list:
        counts, char = policy.split(' ')
        lo, hi = (int(bound) for bound in counts.split('-'))
        occurrences = password.count(char)
        if lo <= occurrences <= hi:
            valid_count += 1
    return valid_count
def part2(list):
    """Count entries where exactly one of the two positions holds the char.

    Positions are used as-is as indices; the caller's split(':') leaves a
    leading space on each password, which makes 1-based indexing line up.
    """
    valid_count = 0
    for policy, password in list:
        counts, char = policy.split(' ')
        first, second = (int(pos) for pos in counts.split('-'))
        # XOR: valid only when exactly one position matches.
        if (password[first] == char) ^ (password[second] == char):
            valid_count += 1
    return valid_count
def main(arguments):
    """Read the day-2 input file and print both puzzle answers.

    Each line "policy:password" is split on ':' — note the password keeps
    its leading space, which part2's 1-based indexing relies on.
    """
    f = open('inputs/day2', 'r')
    list = [l.strip('\n').split(':') for l in f.readlines()]

    print(f'Part 1: {part1(list)}')
    print(f'Part 2: {part2(list)}')


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
package com.judas.katachi.ui.activities;
import android.os.Bundle;
import android.widget.ArrayAdapter;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import com.judas.katachi.R;
import com.judas.katachi.core.theme.KatachiTheme;
import com.judas.katachi.core.theme.PresetTheme;
import com.judas.katachi.ui.views.BoardView;
import com.toomasr.sgf4j.Sgf;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import static com.judas.katachi.core.prefs.PreferenceHelper.prefs;
import static com.judas.katachi.core.theme.PresetTheme.CLASSIC;
import static com.judas.katachi.utils.log.Logger.Level.DEBUG;
import static com.judas.katachi.utils.log.Logger.log;
import static com.judas.katachi.utils.log.Logger.logError;
import static com.judas.katachi.utils.view.ViewUtils.showToast;
/**
 * Base activity for screens that render a go board: owns a {@link BoardView}
 * preview and provides shared helpers for theme selection, instance-state
 * saving and SGF game loading.
 */
public class BoardActivity extends AppCompatActivity {
    /** Callback invoked when the user picks a theme from the dialog. */
    public interface OnThemeSelectedListener {
        void onThemeSelected(KatachiTheme theme);
    }

    private static final String TAG = BoardActivity.class.getSimpleName();
    // Instance-state keys for the current theme and move number.
    protected static final String EXTRA_THEME = TAG + ".EXTRA_THEME";
    protected static final String EXTRA_MOVE = TAG + ".EXTRA_MOVE";

    // Board preview view; subclasses are expected to assign this.
    protected BoardView preview;

    @Override
    protected void onSaveInstanceState(@NonNull final Bundle outState) {
        super.onSaveInstanceState(outState);
        log(DEBUG, TAG, "onSaveInstanceState");
        // Persist theme and move so the board survives configuration changes.
        outState.putParcelable(EXTRA_THEME, preview.getTheme());
        outState.putInt(EXTRA_MOVE, preview.getMoveNumber());
    }

    /**
     * Shows a dialog listing all preset and user-defined themes; the chosen
     * theme is delivered to {@code listener}.
     */
    protected void showThemeDialog(final OnThemeSelectedListener listener) {
        log(DEBUG, TAG, "showThemeDialog");

        // NOTE(review): raw List types below — presumably List<KatachiTheme>
        // and List<String>; type arguments appear stripped in this copy.
        final List themes = new ArrayList<>();
        final List themeNames = new ArrayList<>();

        // Built-in presets first, then themes saved by the user.
        for (final PresetTheme preset : PresetTheme.values()) {
            final KatachiTheme theme = new KatachiTheme(this, preset);
            themes.add(theme);
            themeNames.add(theme.name);
        }

        for (final KatachiTheme userTheme : prefs(this).getUserThemes()) {
            final KatachiTheme theme = new KatachiTheme(this, userTheme);
            themes.add(theme);
            themeNames.add(theme.name);
        }

        final ArrayAdapter adapter = new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, themeNames);

        new AlertDialog.Builder(this)
            .setTitle(R.string.menu_themes)
            .setAdapter(adapter, (dialog, which) -> {
                // Map the clicked name back to its theme instance.
                final int index = themeNames.indexOf(adapter.getItem(which));
                final KatachiTheme theme = themes.get(index);
                if (listener != null) {
                    listener.onThemeSelected(theme);
                }
                dialog.dismiss();
            })
            .show();
    }

    /**
     * Loads the bundled "ear-reddening game" SGF from assets at move 127,
     * falling back to a toast on I/O failure.
     */
    protected void loadDefaultGame(final KatachiTheme theme) {
        log(DEBUG, TAG, "loadDefaultGame");

        try {
            final InputStream is = getAssets().open("ear-reddening-game.sgf");
            initGame(is, theme, 127);
            is.close();
        } catch (final IOException e) {
            logError(TAG, "loadFromAssets", e);
            showToast(this, R.string.sgf_loading_failure);
        }
    }

    // NOTE(review): body only logs — looks incomplete/dead; confirm whether
    // this was meant to open `filename` and call initGame.
    private void loadFromAssets(final String filename, final KatachiTheme theme) {
        log(DEBUG, TAG, "loadFromAssets " + filename);
    }

    /**
     * Parses the SGF stream into the preview, positions it at {@code move}
     * and applies {@code theme} (CLASSIC when null), then redraws.
     */
    protected void initGame(final InputStream stream, final KatachiTheme theme, final int move) {
        log(DEBUG, TAG, "initGame");
        preview.setGame(Sgf.createFromInputStream(stream));
        preview.setMoveNumber(move);
        preview.setTheme(theme == null ? new KatachiTheme(this, CLASSIC) : theme);
        preview.postInvalidate();
    }
}
/*
Reverse words in a given string
Given a String of length S, reverse the whole string without reversing the individual words in it. Words are separated by dots.
Input:
The first line contains T denoting the number of testcases. T testcases follow. Each case contains a string S containing characters.
Output:
For each test case, in a new line, output a single line containing the reversed String.
Constraints:
1 <= T <= 100
1 <= |S| <= 2000
Example:
Input:
2
i.like.this.program.very.much
pqr.mno
Output:
much.very.program.this.like.i
mno.pqr */
#include <iostream>
#include <stack>
#include <string>
using namespace std;
// Prints the dot-separated words of `str` in reverse order, joined by dots
// (no trailing dot and no newline; the caller prints the newline).
// BUGFIX: this copy had the template argument stripped ("stack st;"), which
// does not compile; reconstructed as std::stack<std::string>.
void reverse(std::string str)
{
    // Split on '.' while pushing words, so the last word ends up on top.
    std::stack<std::string> st;
    std::string temp = "";
    for (std::size_t i = 0; i < str.length(); i++)
    {
        if (str[i] != '.')
            temp += str[i];
        else
        {
            st.push(temp);
            temp = "";
        }
    }
    // Push the final word (also handles an input with no dots).
    st.push(temp);

    // Pop words back out: stack order reverses the sentence.
    while (!st.empty())
    {
        std::cout << st.top();
        if (st.size() > 1)
            std::cout << ".";
        st.pop();
    }
}
// Reads T test cases from stdin; for each, prints the dot-separated words of
// the input string in reverse order, one result per line.
int main() {
    //code
    int t;
    cin>>t;
    while(t--)
    {
        string str;
        cin>>str;
        // reverse() prints the transformed string without a newline.
        reverse(str);
        cout<<endl;
    }
    return 0;
}
/*
* Copyright (c) 2010 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ovirt.engine.api.common.util;
/**
 * A utility class for converting sizes; i.e: bytes to mega-bytes, giga-bytes to bytes, etc.
 *
 * All conversions use binary units (1 MB = 2^20 bytes, 1 GB = 2^30 bytes),
 * rounding down when converting to a larger unit.
 *
 * @author ori
 */
public class SizeConverter {

    /** Number of bytes in one mega-byte (2^20). Declared final: constants must not be reassigned. */
    public static final Long BYTES_IN_MEGA = 1024L * 1024L;

    /** Number of mega-bytes in one giga-byte (2^10). */
    public static final Long MEGAS_IN_GIGA = 1024L;

    /**
     * Converts mega-bytes to bytes.
     * @param megabytes number of mega-bytes
     * @return number of bytes
     */
    public static long megasToBytes(int megabytes) {
        return megabytes * BYTES_IN_MEGA;
    }

    /**
     * Converts mega-bytes to bytes.
     * @param megabytes number of mega-bytes
     * @return number of bytes
     */
    public static long megasToBytes(long megabytes) {
        return megabytes * BYTES_IN_MEGA;
    }

    /**
     * Converts giga-bytes to bytes.
     * @param gigabytes number of giga-bytes
     * @return number of bytes
     */
    public static long gigasToBytes(int gigabytes) {
        return gigabytes * BYTES_IN_MEGA * MEGAS_IN_GIGA;
    }

    /**
     * Converts giga-bytes to bytes.
     * @param gigabytes number of giga-bytes
     * @return number of bytes
     */
    public static long gigasToBytes(long gigabytes) {
        return gigabytes * BYTES_IN_MEGA * MEGAS_IN_GIGA;
    }

    /**
     * Converts bytes to mega-bytes. Rounds down to the nearest mega-byte.
     * @param bytes number of bytes
     * @return number of megabytes.
     */
    public static long bytesToMegas(long bytes) {
        return bytes/BYTES_IN_MEGA;
    }

    /**
     * Converts bytes to giga-bytes. Rounds down to the nearest giga-byte.
     * @param bytes number of bytes
     * @return number of gigabytes.
     */
    public static long bytesToGigas(long bytes) {
        return bytes/(BYTES_IN_MEGA * MEGAS_IN_GIGA);
    }
}
import math
# Parse "A B.CD" where the second token is a price with up to two decimal
# places; compute floor(A * B.CD) exactly using integer arithmetic
# (b holds hundredths).
sta,stb=[str(i) for i in input().split()]
a = int(sta)
# NOTE(review): only stb[0] is used for the integer part and stb[1] is
# indexed unconditionally — this assumes a single-digit integer part and a
# decimal point present; confirm the input format guarantees that.
b = int(stb[0])*100
if stb[1] == '.':
    # decimal part: add tenths and hundredths digits when present
    if len(stb) >= 3:
        b+=int(stb[2])*10
    if len(stb) == 4:
        b+=int(stb[3])
print(a*b//100)
package com.intellij.plugins.thrift.lang.psi.presentation;
import com.intellij.navigation.ItemPresentation;
import com.intellij.navigation.ItemPresentationProvider;
import com.intellij.openapi.util.Iconable;
import com.intellij.plugins.thrift.lang.psi.ThriftSubDeclaration;
import com.intellij.plugins.thrift.lang.psi.ThriftTopLevelDeclaration;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
/**
 * Supplies navigation/presentation info (name, location, icon) for Thrift
 * sub-declarations, e.g. fields or enum members nested inside a top-level
 * declaration.
 *
 * NOTE(review): the interface's type argument appears stripped in this copy
 * (presumably ItemPresentationProvider&lt;ThriftSubDeclaration&gt;).
 */
public class ThriftSubDeclarationPresentationProvider implements ItemPresentationProvider {
  @Override
  public ItemPresentation getPresentation(final ThriftSubDeclaration item) {
    return new ItemPresentation() {
      @Nullable
      @Override
      public String getPresentableText() {
        // The element's own name is the primary text.
        return item.getName();
      }

      @Nullable
      @Override
      public String getLocationString() {
        // Show the enclosing top-level declaration if any, else the file name.
        ThriftTopLevelDeclaration topLevelDeclaration = PsiTreeUtil.getParentOfType(item, ThriftTopLevelDeclaration.class, true);
        return topLevelDeclaration != null ? topLevelDeclaration.getName() : item.getContainingFile().getName();
      }

      @Nullable
      @Override
      public Icon getIcon(boolean unused) {
        return item.getIcon(Iconable.ICON_FLAG_VISIBILITY);
      }
    };
  }
}
# -*- coding: utf-8 -*-
from DummyDB import DummyDB
from KnowledgeModel import BaseKnowledgeModel
class KnowledgeGraph(object):
    """
    This is the class that represents connections between knowledge concepts
    and supports the functions to access those connections (dependencies).

    Dependencies are stored as a nested mapping:
        _dependency[sourceConceptID][targetConceptID] = weight
    Registered observers (knowledge models) are notified via updateConcept().
    """

    def __init__(self):
        # Dependency: [Source Concept ID][Target Concept ID] = weight
        self._dependency = {}
        # Observers (type of knowledge model)
        self._observers = []

    def setDependency(self, dependency):
        """Replace the whole dependency mapping."""
        self._dependency = dependency

    # Accessors -- Read-Only for users
    def getDependency(self, sourceID=None, targetID=None):
        """Return the weight of the edge sourceID -> targetID, or None if absent.

        BUGFIX: a zero-argument getDependency() used to precede this method;
        Python has no overloading, so it was dead (shadowed) and was removed.

        Raises:
            ValueError: if either id is None.
        """
        if sourceID is None or targetID is None:
            raise ValueError("Input source concept id or target concept id is invalid!")
        source_edges = self._dependency.get(sourceID)
        if source_edges is None:
            return None
        return source_edges.get(targetID)

    def getDependencyBySource(self, sourceID=None):
        """Return {sourceID: {targetID: weight, ...}} for all edges leaving sourceID
        (the value is None when sourceID has no edges)."""
        if sourceID is None:
            raise ValueError("Input source concept id is invalid!")
        return {sourceID: self._dependency.get(sourceID)}

    def getDependencyByTarget(self, targetID=None):
        """Return {sourceID: {targetID: weight}} for every edge entering targetID.

        Note: edges with a falsy weight (e.g. 0) are skipped, preserving the
        original truthiness-based filtering.
        """
        if targetID is None:
            raise ValueError("Input target concept id is invalid!")
        rt = {}
        for source, edges in self._dependency.items():
            weight = edges.get(targetID)
            if weight:
                rt[source] = {targetID: weight}
        return rt

    def getAllDependencies(self):
        """Return the full dependency mapping."""
        return self._dependency

    def getParents(self, currentID=None):
        """Return every ancestor of currentID (all concepts that depend on it,
        transitively), in discovery order, excluding currentID itself."""
        if currentID is None:
            raise ValueError("Input current concept id is invalid!")
        rt = []
        childSet = [currentID]
        keys = self._dependency.keys()
        while childSet:
            currentChild = childSet.pop()
            rt.append(currentChild)
            for key in keys:
                # Follow edges key -> currentChild backwards; skip concepts
                # already visited (falsy-weight edges are ignored, as before).
                if self._dependency.get(key).get(currentChild) and key not in rt:
                    childSet.append(key)
        rt.remove(currentID)
        return rt

    # Modifiers -- For admins
    def updateDependency(self, sourceID=None, targetID=None, weight=None):
        """Update the weight of an existing dependency; returns True on success."""
        if sourceID is None or targetID is None:
            raise ValueError("Input source concept id or target concept id is invalid!")
        if weight is None:
            raise ValueError("Input weight is invalid!")
        if self._dependency.get(sourceID) is not None and self._dependency.get(sourceID).get(targetID) is not None:
            self._dependency[sourceID][targetID] = weight
            return True
        raise ValueError("The requested dependency is not available. Using 'addDependency' to create new one.")

    def addDependency(self, sourceID=None, targetID=None, weight=None, acyclicCKFlag=False):
        """Create a new dependency; optionally verify the graph stays acyclic."""
        if sourceID is None or targetID is None:
            raise ValueError("Input source concept id or target concept id is invalid!")
        if weight is None:
            raise ValueError("Input weight is invalid!")
        if self._dependency.get(sourceID) is not None and self._dependency.get(sourceID).get(targetID) is not None:
            raise ValueError("The requested dependency is already available. Using 'updateDependency' to change its weight.")
        # BUGFIX: acyclicCK was called as a bare name (NameError at runtime);
        # it is a method and must be invoked on self.
        if acyclicCKFlag and self.acyclicCK(sourceID, targetID) is False:
            raise ValueError("The requested dependency is invalid. It makes the acyclic graph cyclic.")
        # Create the (possibly first) edge leaving sourceID.
        self._dependency.setdefault(sourceID, {})[targetID] = weight
        return True

    def delDependency(self, sourceID=None, targetID=None):
        """Delete an existing dependency, pruning the source entry when it
        becomes empty; returns True on success."""
        if sourceID is None or targetID is None:
            raise ValueError("Input source concept id or target concept id is invalid!")
        if self._dependency.get(sourceID) is not None and self._dependency.get(sourceID).get(targetID) is not None:
            del self._dependency[sourceID][targetID]
            # BUGFIX: the empty-source cleanup used `== None`, which an empty
            # dict never satisfies (dead branch); test for emptiness instead.
            if not self._dependency[sourceID]:
                del self._dependency[sourceID]
            return True
        raise ValueError("The requested dependency is not available. No need to delete.")

    def acyclicCK(self, sourceID=None, targetID=None):
        """Return True if adding sourceID -> targetID keeps the graph acyclic.

        NOTE: tentatively inserts the edge (weight 1.0) into the live graph,
        matching the original behavior; the topological-sort check below is
        still a TODO, so this currently always returns True.
        """
        newDependency = self._dependency
        if newDependency.get(sourceID) is None:
            newDependency[sourceID] = {}
        newDependency[sourceID][targetID] = 1.0
        # Algorithm for checking: a topological sort (Kahn's algorithm).
        # Pseudo code:
        #     Q <-- set of all nodes with no incoming edges
        #     while Q is non-empty do
        #         remove a node n from Q
        #         for each node m with an edge e from n to m do
        #             remove edge e from the graph
        #             if m has no other incoming edges then insert m into Q
        #     graph has edges  -> cycle
        #     graph is empty   -> acyclic
        # TODO: implement the algorithm above.
        return True

    # Observer pattern
    def addObserver(self, observer=None):
        """Register a knowledge-model observer (must be a BaseKnowledgeModel)."""
        if observer is None:
            raise ValueError("Input observer is not available!")
        if isinstance(observer, BaseKnowledgeModel):
            if observer not in self._observers:
                self._observers.append(observer)
        else:
            raise TypeError("Input is not an acceptable type of observer(knowledge model)!")

    def removeObserver(self, observer=None):
        """Unregister a previously added observer (no-op if absent)."""
        if observer is None:
            raise ValueError("Input observer is not available!")
        try:
            if observer in self._observers:
                self._observers.remove(observer)
        except ValueError:
            pass

    def updateConcept(self):
        """Push the current graph to every registered observer."""
        for observer in self._observers:
            observer.initGraph(self)  # Pass KG itself: graph data plus accessors
# Test cases (Python 2 print-statement syntax; exercises the KnowledgeGraph
# API against an in-memory DummyDB acting as the backing store).
if __name__ == '__main__':
    print "--Start Test--"
    TMP_DB = DummyDB()
    print "DB's original content is:"
    print TMP_DB._dependency
    KG = KnowledgeGraph()
    # Get student's knowledge from DB (shares the DummyDB dict directly).
    KG._dependency = TMP_DB._dependency
    # Usual cases: accessors
    graph = KG.getAllDependencies()
    print "The all dependencies in Knowledge Graph are:"
    print graph
    weight1 = KG.getDependency('Math-2', 'Math-3')
    print "The weight between 'Math-2' and 'Math-3' is:"
    print weight1
    s1 = KG.getDependencyBySource('Math-2')
    print "The all target concepts which start from 'Math-2' are:"
    print s1
    t1 = KG.getDependencyByTarget('Math-4')
    print "The all source concepts which end by 'Math-4' are:"
    print t1
    parents = KG.getParents('Math-4')
    print "The parents concepts of 'Math-4' are:"
    print parents
    # Modifiers: update / add / delete round-trips
    KG.updateDependency('Math-2', 'Math-3', 0.95)
    weight2 = KG.getDependency('Math-2', 'Math-3')
    print "The updated weight between 'Math-2' and 'Math-3' is:"
    print weight2
    weight3 = KG.getDependency('Math-1', 'Math-4')
    print "The weight between 'Math-1' and 'Math-4' is (non-available yet):"
    print weight3
    KG.addDependency('Math-1', 'Math-4', 0.4)
    weight3 = KG.getDependency('Math-1', 'Math-4')
    print "New created weight between 'Math-1' and 'Math-4' is:"
    print weight3
    KG.delDependency('Math-2', 'Math-3')
    weight4 = KG.getDependency('Math-2', 'Math-3')
    print "The weight between 'Math-2' and 'Math-3' is (has been deleted):"
    print weight4
    TMP_DB._dependency = KG._dependency
    print "DB's new content is:"
    print TMP_DB._dependency
    # Unusual cases (an error should be raised for each case below if uncommented)
    # Update non-available dependency
    #KG.updateDependency('Math-2', 'Math-1', 1.0)
    # Add a repeated dependency
    #KG.addDependency('Math-1', 'Math-2', 1.0)
    # Delete a non-available dependency
    #KG.delDependency('Math-4', 'Math-2')
    print "--End of test--"
#ifndef __SRC_LIB_PRICING_STEEPESTEDGEPRICING_HPP__
#define __SRC_LIB_PRICING_STEEPESTEDGEPRICING_HPP__
#include "basis.hpp"
#include "pricing.hpp"
#include "runtime.hpp"
//
// Steepest-edge pricing rule for the QP active-set solver: chooses the active
// constraint whose multiplier most violates its sign condition, scaled by
// per-basis-row reference weights.  (Stripped template arguments on the
// weights vector restored.)
class SteepestEdgePricing : public Pricing {
 private:
  Runtime& runtime;
  Basis& basis;
  // One steepest-edge weight per basis row, initialized to 1.0.
  std::vector<double> weights;

  // Returns the index of the active constraint to drop from the active set,
  // or -1 if no multiplier violates its bound's sign condition.
  HighsInt chooseconstrainttodrop(const Vector& lambda) {
    auto activeconstraintidx = basis.getactive();
    auto constraintindexinbasisfactor = basis.getindexinfactor();

    HighsInt minidx = -1;
    double maxval = 0.0;
    for (HighsInt i = 0; i < activeconstraintidx.size(); i++) {
      HighsInt indexinbasis =
          constraintindexinbasisfactor[activeconstraintidx[i]];
      if (indexinbasis == -1) {
        printf("error\n");
      }
      assert(indexinbasis != -1);

      // Steepest-edge ratio: lambda_i^2 / w_i.
      double val = lambda.value[indexinbasis] * lambda.value[indexinbasis] /
                   weights[indexinbasis];
      if (val > maxval && fabs(lambda.value[indexinbasis]) >
                              runtime.settings.lambda_zero_threshold) {
        // Only multipliers with the wrong sign for their bound qualify.
        if (basis.getstatus(activeconstraintidx[i]) ==
                BasisStatus::ActiveAtLower &&
            -lambda.value[indexinbasis] > 0) {
          minidx = activeconstraintidx[i];
          maxval = val;
        } else if (basis.getstatus(activeconstraintidx[i]) ==
                       BasisStatus::ActiveAtUpper &&
                   lambda.value[indexinbasis] > 0) {
          minidx = activeconstraintidx[i];
          maxval = val;
        } else {
          // TODO
        }
      }
    }

    return minidx;
  }

 public:
  SteepestEdgePricing(Runtime& rt, Basis& bas)
      : runtime(rt),
        basis(bas),
        // NOTE(review): template argument restored; size presumed to be the
        // number of variables (one weight per basis row) -- confirm against
        // the upstream HiGHS sources.
        weights(std::vector<double>(rt.instance.num_var, 1.0)){};

  // Solves B^T lambda = gradient and picks the constraint to drop.
  HighsInt price(const Vector& x, const Vector& gradient) {
    Vector lambda = basis.ftran(gradient);
    HighsInt minidx = chooseconstrainttodrop(lambda);
    return minidx;
  }

  // Rank-1 update of the steepest-edge weights after constraint p leaves the
  // basis (parameter q is currently unused).
  void update_weights(const Vector& aq, const Vector& ep, HighsInt p,
                      HighsInt q) {
    HighsInt rowindex_p = basis.getindexinfactor()[p];

    Vector v = basis.btran(aq);

    double weight_p = weights[rowindex_p];
    for (HighsInt i = 0; i < runtime.instance.num_var; i++) {
      if (i == rowindex_p) {
        weights[i] = weight_p / (aq.value[rowindex_p] * aq.value[rowindex_p]);
      } else {
        weights[i] = weights[i] -
                     2 * (aq.value[i] / aq.value[rowindex_p]) * (v.value[i]) +
                     (aq.value[i] * aq.value[i]) /
                         (aq.value[rowindex_p] * aq.value[rowindex_p]) *
                         weight_p;
      }
    }
  }
};
#endif | c++ | 21 | 0.578728 | 78 | 29.367816 | 87 | starcoderdata |
import click
from FeatureCloud.api.cli.test.commands import test
from FeatureCloud.api.cli.controller.commands import controller
from FeatureCloud.api.cli.app.commands import app
@click.group('first-level')
def fc_cli() -> None:
    """FeatureCloud pip package"""


# Register every sub-command group on the top-level CLI.
for _command in (test, controller, app):
    fc_cli.add_command(_command)

if __name__ == "__main__":
    fc_cli()
/// Sends an Identify Controller admin command to the NVMe controller and
/// prints the model, serial number and firmware revision it reports.
pub async fn identify_controller(&self) {
        // TODO: Use same buffer
        // DMA buffer the controller fills with the identify-controller data.
        // NOTE(review): assumes a zeroed IdentifyControllerData is a valid
        // initial state for this plain-data struct -- confirm.
        let data: Dma<IdentifyControllerData> = unsafe { Dma::zeroed().unwrap().assume_init() };

        // println!("  - Attempting to identify controller");
        // Submit the command on the admin queue and wait for its completion.
        let comp = self
            .submit_and_complete_admin_command(|cid| NvmeCmd::identify_controller(cid, data.physical()))
            .await;
        log::trace!("Completion: {:?}", comp);

        // println!("  - Dumping identify controller");

        // The fixed-size byte fields are space-padded ASCII; trim after a
        // lossy UTF-8 conversion before printing.
        let model_cow = String::from_utf8_lossy(&data.model_no);
        let serial_cow = String::from_utf8_lossy(&data.serial_no);
        let fw_cow = String::from_utf8_lossy(&data.firmware_rev);

        let model = model_cow.trim();
        let serial = serial_cow.trim();
        let firmware = fw_cow.trim();

        println!(
            "  - Model: {} Serial: {} Firmware: {}",
            model, serial, firmware,
        );
    }
import numpy as np
import os
import torch
import torch.nn.functional as F
from torchvision.utils import save_image
import data
import models
import utils.my_logging as logging
from utils.util import lab_to_rgb
from .base_solver import BaseSolver
logger = logging.get_logger(__name__)
class REFCOLORSolver(BaseSolver):
    """Reference-based colorization solver (inference only).

    Wires a correspondence network and a generator together with a frozen,
    pretrained BigGAN (discriminator/encoder/generator) and runs the test
    dataloader through the combined model, optionally producing several
    diverse colorizations per input.
    """
    def __init__(self, cfg):
        super().__init__(cfg)
        self.prepare_for_testing()
    def load_biggan(self):
        """Load the pretrained BigGAN parts, move them to the device and freeze them."""
        self.biggan_dis = models.biggan.Discriminator()
        self.biggan_enc = models.biggan.Encoder()
        self.biggan_gen = models.biggan.Generator()
        self.biggan_dis.load_state_dict(torch.load(self.cfg.BIGGAN_PRETRAIN.DIS))
        self.biggan_enc.load_state_dict(torch.load(self.cfg.BIGGAN_PRETRAIN.ENC))
        self.biggan_gen.load_state_dict(torch.load(self.cfg.BIGGAN_PRETRAIN.GEN))
        self.biggan_enc = self.biggan_enc.to(self.device)
        self.biggan_gen = self.biggan_gen.to(self.device)
        self.biggan_dis = self.biggan_dis.to(self.device)
        self.biggan_dis.eval()
        self.biggan_enc.eval()
        self.biggan_gen.eval()
        # BigGAN stays frozen throughout: disable gradients on every parameter.
        for param in self.biggan_dis.parameters():
            param.requires_grad = False
        for param in self.biggan_gen.parameters():
            param.requires_grad = False
        for param in self.biggan_enc.parameters():
            param.requires_grad = False
    def prepare_for_testing(self):
        """Build the correspondence/generator nets, the combined model and,
        when DIVERSE.DIRECTION >= 0, the latent deformator used to walk the
        interpretable latent space."""
        self.net_corr = models.ref_based_model.CorrespondenceNet(self.cfg.REFCOLOR.CORR, )
        self.net_gen = models.ref_based_model.Generator(self.cfg.REFCOLOR.GEN, )
        self.to_device()
        self.load_biggan()
        self.model = models.ref_based_model.RefBasedModel(self.net_corr, self.net_gen, None, None, self.biggan_dis,
                                                          self.biggan_enc, self.biggan_gen, self.cfg)
        if self.cfg.DIVERSE.DIRECTION >= 0:
            # shift_dim presumably matches the BigGAN latent size minus the
            # padded last dimension (119 + 1 = 120) -- TODO confirm.
            self.deformator = models.latent_deformator.LatentDeformator(
                shift_dim=119, type=models.latent_deformator.DeformatorType.ORTHO)
            self.deformator.load_state_dict(torch.load(self.cfg.DIVERSE.CKP, map_location='cpu'))
            self.deformator.to(self.device).eval()
        # print('net_corr: ', sum(map(lambda x: x.numel(), self.net_corr.parameters())))
        # print('net_gen: ', sum(map(lambda x: x.numel(), self.net_gen.parameters())))
        # print('biggan_dis: ', sum(map(lambda x: x.numel(), self.biggan_dis.parameters())))
        # print('biggan_enc: ', sum(map(lambda x: x.numel(), self.biggan_enc.parameters())))
        # print('biggan_gen: ', sum(map(lambda x: x.numel(), self.biggan_gen.parameters())))
    def test(self):
        """Iterate over the test dataloader and render outputs for each batch.

        Depending on DIVERSE.DIRECTION, produces either a sweep along one
        interpretable latent direction, random latent shifts, or a single
        colorization per batch.
        """
        self.load_from_ckp()
        self.test_dl = data.get_loader(cfg=self.cfg, ds=self.cfg.DATA.NAME)

        # Fix seeds so the diverse shifts are reproducible across runs.
        torch.manual_seed(self.cfg.SEED)
        np.random.seed(self.cfg.SEED)

        data_iter = iter(self.test_dl)
        bs_idx = 0
        while True:
            try:
                self.read_data_from_dataiter(data_iter)
            except StopIteration:
                break
            if self.cfg.DIVERSE.DIRECTION > -2:
                if hasattr(self, 'deformator'):
                    # diverse colorization by walking through the interpretable latent space
                    for div_idx, shift in enumerate(
                            np.arange(-self.cfg.DIVERSE.SHIFT_RANGE, self.cfg.DIVERSE.SHIFT_RANGE + 1e-9,
                                      self.cfg.DIVERSE.SHIFT_RANGE / self.cfg.DIVERSE.SHIFT_COUNT)):
                        div_info = f'_direction{self.cfg.DIVERSE.DIRECTION}num{div_idx}'
                        # One-hot-scaled vector along the chosen direction.
                        ohv = torch.zeros(119).to(self.device)
                        ohv[self.cfg.DIVERSE.DIRECTION] = shift
                        latent_shift = self.deformator(ohv)
                        # Pad the deformator output to the full latent width.
                        pad = torch.zeros(latent_shift.size(0), 1).to(self.device)
                        latent_shift = torch.cat((latent_shift, pad), dim=1)
                        self.sample_data['shift'] = latent_shift
                        self.validate('test_bs{}'.format(bs_idx), div_info)
                else:
                    # diverse colorization by adding noise to the latent code
                    for div_idx in range(2 * self.cfg.DIVERSE.SHIFT_COUNT + 1):
                        div_info = f'_randdirectionnum{div_idx}'
                        latent_shift = torch.randn(1, 120, device=self.device)
                        self.sample_data['shift'] = latent_shift
                        self.validate('test_bs{}'.format(bs_idx), div_info)
            else:
                self.validate('test_bs{}'.format(bs_idx))
            bs_idx += 1
    @torch.no_grad()
    def validate(self, info, div_info=''):
        """Run one forward pass on the current sample and save result panels.

        `info`/`div_info` only affect the output file names.
        """
        self.net_gen.eval()
        self.net_corr.eval()

        out = self.model('generate_fake', data=self.sample_data)

        # Rebuild a grayscale RGB image from the L channel (a,b set to 0);
        # x_l is presumably normalized to [-1, 1], hence *50+50 -> [0, 100].
        x_gray_lab = torch.cat(
            [
                self.sample_data['x_l'] * 50.0 + 50.0,
                0 * self.sample_data['x_l'],
                0 * self.sample_data['x_l'],
            ],
            dim=1,
        )
        out['x_gray'] = lab_to_rgb(x_gray_lab)
        # Map [-1, 1] outputs back to [0, 1] for saving.
        out['ref'] = out['ref'] * 0.5 + 0.5
        if not self.cfg.REFCOLOR.CORR.WARP_FEAT:
            # The warped reference is an image: resize it to the input size.
            out['warp_image'] = (
                F.interpolate(
                    out['warp_image'][:, 0:3, :, :],
                    size=out['x_gray'].size()[2:],
                    mode='bilinear',
                ) * 0.5 + 0.5)
            images = torch.cat(
                [
                    out['x_gray'],
                    out['ref'],
                    out['warp_image'],
                    out['fake_rgb'],
                    self.sample_data['x_rgb'] * 0.5 + 0.5,
                ],
                dim=3,
            ).data.cpu()
        else:
            images = torch.cat(
                [
                    out['x_gray'],
                    out['ref'],
                    out['fake_rgb'],
                    self.sample_data['x_rgb'] * 0.5 + 0.5,
                ],
                dim=3,
            ).data.cpu()
        # NOTE(review): in the WARP_FEAT branch `images` holds 4 panels but 5
        # names are passed; save_single_images asserts divisibility -- confirm.
        self.save_single_images(images, ('gray', 'ref', 'warp', 'fake', 'gt'), div_info)
        if self.cfg.DATA.FULL_RES_OUTPUT:
            self.save_single_images(out['fake_rgb_full_res'], ('full_resolution_results', ), div_info)
        # Best-effort: also save the side-by-side comparison strip.
        try:
            save_image(
                images,
                os.path.join(
                    self.cfg.TEST.LOG_DIR,
                    'out_{}{}.png'.format(info, div_info),
                ),
                normalize=True,
                nrow=1,
            )
        except: # noqa: E722
            pass
        self.net_gen.train()
        self.net_corr.train()
    def save_single_images(self, images, name_list, div_info=''):
        """Split a horizontal strip of equally wide panels and save each panel
        into a subdirectory of TEST.LOG_DIR named after `name_list`."""
        n, _, _, w = images.size()
        assert w % len(name_list) == 0
        for i, sub_images in enumerate(torch.split(images, w // len(name_list), dim=-1)):
            p = os.path.join(
                self.cfg.TEST.LOG_DIR,
                name_list[i],
            )
            # Best-effort mkdir (directory may already exist).
            try:
                os.makedirs(p)
            except: # noqa: E722
                pass
            for j in range(n):
                img_name = self.sample_data['image_name'][j]
                save_image(
                    sub_images[j],
                    os.path.join(p, f'{img_name}{div_info}.png'),
                    normalize=True,
                    nrow=1,
                )
| python | 21 | 0.509573 | 115 | 38.941176 | 187 | research_code |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// ScriptableObject asset describing one shop category: the items it offers.
/// Instances are created through the Assets &gt; Create menu.
/// </summary>
[CreateAssetMenu]
public class ShopCategoryData : ScriptableObject
{
    // Items listed under this category; presumably shown in the shop UI -- confirm.
    public ShopItemInstanceData[] categoryItems;
}
//
// Copyright (c) 2008-2018 the Urho3D project.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
#include "../Precompiled.h"
#include "../Container/ArrayPtr.h"
#include "../Core/Profiler.h"
#include "../Core/Context.h"
#include "../IO/Deserializer.h"
#include "../IO/Log.h"
#include "../IO/MemoryBuffer.h"
#include "../Resource/YAMLFile.h"
#include "../Resource/ResourceCache.h"
#include
#include
#include "../DebugNew.h"
namespace YAML
{
template<>
struct convert
{
static Node encode(const Urho3D::String& rhs)
{
return Node(rhs.CString());
}
static bool decode(const Node& node, Urho3D::String& rhs)
{
if(!node.IsScalar())
return false;
rhs = node.Scalar().c_str();
return true;
}
};
}
namespace Urho3D
{
// Constructs an empty YAML resource bound to the engine context.
YAMLFile::YAMLFile(Context* context)
    : Resource(context)
{
}
// Registers the YAMLFile factory with the context so the resource cache can
// instantiate it by type.  (Stripped template argument restored.)
void YAMLFile::RegisterObject(Context* context)
{
    context->RegisterFactory<YAMLFile>();
}
// Matches optionally signed integer or decimal literals; used to decide
// whether a YAML scalar should become a JSON number.
static std::regex isNumeric("-?[0-9]+(\\.[0-9]+)?");

// Recursively converts a yaml-cpp node into an Urho3D JSON value.
// (The original comment said "rapidjson", which this function never touches;
// stripped template arguments on as<>() restored.)
static void ToJSONValue(JSONValue& jsonValue, const YAML::Node& yamlValue)
{
    switch (yamlValue.Type())
    {
    case YAML::NodeType::Null:
        // Reset to null type
        jsonValue.SetType(JSON_NULL);
        break;

    case YAML::NodeType::Scalar:
    {
        // Yaml does not make a distinction between data types. Instead of storing all values as strings we make a best
        // guess here.
        // `true` and `false` strings are treated as booleans
        String value = yamlValue.Scalar();
        if (value == "true")
            jsonValue = true;
        else if (value == "false")
            jsonValue = false;
        else
        {
            // All numbers are treated as doubles
            const auto& scalar = yamlValue.Scalar();
            if (std::regex_match(scalar, isNumeric))
                jsonValue = yamlValue.as<double>();
            else
                jsonValue = scalar.c_str();
        }
        break;
    }

    case YAML::NodeType::Sequence:
    {
        jsonValue.Resize(yamlValue.size());
        for (unsigned i = 0; i < yamlValue.size(); ++i)
        {
            ToJSONValue(jsonValue[i], yamlValue[i]);
        }
        break;
    }

    case YAML::NodeType::Map:
    {
        jsonValue.SetType(JSON_OBJECT);
        for (auto i = yamlValue.begin(); i != yamlValue.end(); ++i)
        {
            // Map keys go through the YAML::convert<Urho3D::String> traits.
            JSONValue& value = jsonValue[i->first.as<String>()];
            ToJSONValue(value, i->second);
        }
        break;
    }

    default:
        break;
    }
}
// Reads the whole stream, parses it as YAML and converts it into root_.
// Returns false on empty input, short reads, or parse failure.
// (Stripped SharedArrayPtr template argument restored; the copied-from-JSON
// error messages now say YAML.)
bool YAMLFile::BeginLoad(Deserializer& source)
{
    unsigned dataSize = source.GetSize();
    if (!dataSize && !source.GetName().Empty())
    {
        URHO3D_LOGERROR("Zero sized YAML data in " + source.GetName());
        return false;
    }

    SharedArrayPtr<char> buffer(new char[dataSize + 1]);
    if (source.Read(buffer.Get(), dataSize) != dataSize)
        return false;
    // NUL-terminate so the buffer can be handed to the C-string parser.
    buffer[dataSize] = '\0';

    auto document = YAML::Load(buffer.Get());
    if (!document.IsDefined())
    {
        URHO3D_LOGERROR("Could not parse YAML data from " + source.GetName());
        return false;
    }

    ToJSONValue(root_, document);

    SetMemoryUse(dataSize);

    return true;
}
// Recursively converts an Urho3D JSON value into a yaml-cpp node
// (the inverse of ToJSONValue).
static void ToYAMLValue(YAML::Node& yamlValue, const JSONValue& jsonValue)
{
    switch (jsonValue.GetValueType())
    {
    case JSON_NULL:
        yamlValue = YAML::Node(YAML::NodeType::Null);
        break;

    case JSON_BOOL:
        // Booleans are emitted as the scalar strings "true"/"false".
        yamlValue = jsonValue.GetBool() ? "true" : "false";
        break;

    case JSON_NUMBER:
    {
        switch (jsonValue.GetNumberType())
        {
        case JSONNT_INT:
            yamlValue = jsonValue.GetInt();
            break;

        case JSONNT_UINT:
            yamlValue = jsonValue.GetUInt();
            break;

        default:
            yamlValue = jsonValue.GetDouble();
            break;
        }
        break;
    }

    case JSON_STRING:
        yamlValue = jsonValue.GetString();
        break;

    case JSON_ARRAY:
    {
        const JSONArray& jsonArray = jsonValue.GetArray();

        yamlValue = YAML::Node(YAML::NodeType::Sequence);

        for (unsigned i = 0; i < jsonArray.Size(); ++i)
        {
            YAML::Node value;
            ToYAMLValue(value, jsonArray[i]);
            yamlValue.push_back(value);
        }

        break;
    }

    case JSON_OBJECT:
    {
        const JSONObject& jsonObject = jsonValue.GetObject();

        yamlValue = YAML::Node(YAML::NodeType::Map);

        for (JSONObject::ConstIterator i = jsonObject.Begin(); i != jsonObject.End(); ++i)
        {
            YAML::Node value;
            ToYAMLValue(value, i->second_);
            yamlValue[i->first_] = value;
        }

        break;
    }

    default:
        break;
    }
}
// Serializes with the default indentation width of 2 spaces.
bool YAMLFile::Save(Serializer& dest) const
{
    return Save(dest, 2);
}
// Serializes root_ as YAML text to `dest` using the given indentation width.
// Returns true when every byte was written.  (Fixes the "indendation" typo
// in the parameter name; positional callers are unaffected.)
bool YAMLFile::Save(Serializer& dest, int indentation) const
{
    YAML::Node document;
    ToYAMLValue(document, root_);

    YAML::Emitter out;
    out << YAML::Indent(indentation);
    out << document;

    auto size = (unsigned)strlen(out.c_str());
    return dest.Write(out.c_str(), size) == size;
}
// Parses YAML text into this resource's root value; returns true on success.
bool YAMLFile::FromString(const String& source)
{
    return ParseYAML(source, root_, false);
}
// Parses `yaml` into `value`. Returns false on empty or malformed input,
// logging an error only when reportError is set.
bool YAMLFile::ParseYAML(const String& yaml, JSONValue& value, bool reportError)
{
    if (yaml.Empty())
        return false;

    auto yamlNode = YAML::Load(yaml.CString());
    if (!yamlNode.IsDefined())
    {
        if (reportError)
            URHO3D_LOGERRORF("Could not parse YAML data from string");
        return false;
    }

    ToJSONValue(value, yamlNode);
    return true;
}
} | c++ | 22 | 0.60417 | 119 | 23.588448 | 277 | starcoderdata |
import React from 'react';
import { Alert, Button, StyleSheet, Text, View } from 'react-native';
const App = () => {
const generateNumber = () => {
const number = Math.floor(Math.random() * 10);
alert(number);
}
return (
<View style={styles.app__container}>
<Text style={styles.container__title}>Gerador de Números Randômicos:
<Button onPress={generateNumber} title='Gerar número' />
);
};
// Layout: fill the screen and center the content on both axes.
const styles = StyleSheet.create({
  app__container: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center'
  },
  container__title: {
    fontSize: 16,
    marginBottom: 16,
  },
});

export default App;
package testdb
import (
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/jmoiron/sqlx"
"github.com/stretchr/testify/require"
)
// MockDatabase returns a new *sqlx.DB mock alongside with a wrapper providing easier interface for asserting expectations.
func MockDatabase(t *testing.T) (*sqlx.DB, DBMock) {
	rawDB, mock, err := sqlmock.New()
	require.NoError(t, err)

	wrapped := sqlx.NewDb(rawDB, "sqlmock")
	return wrapped, &sqlMockWithAssertions{mock}
}
// DBMock represents a wrapper providing easier interface for asserting expectations.
type DBMock interface {
	sqlmock.Sqlmock

	// AssertExpectations fails the test when any configured expectation was not met.
	AssertExpectations(t *testing.T)
}
// sqlMockWithAssertions embeds sqlmock.Sqlmock and adds a testify-based
// AssertExpectations helper.
type sqlMockWithAssertions struct {
	sqlmock.Sqlmock
}
// AssertExpectations asserts that all the expectations to the mock were met.
func (s *sqlMockWithAssertions) AssertExpectations(t *testing.T) {
err := s.ExpectationsWereMet()
require.NoError(t, err)
} | go | 9 | 0.762582 | 123 | 24.388889 | 36 | starcoderdata |
using System;
using System.Collections.Generic;
using Classes;
using Game;
using Libraries.FActions;
using Libraries.FActions.General;
using Tools;
using UnityEngine;
namespace Libraries.Buildings
{
[Serializable]
public abstract class BaseDataBuildingUnitMgmt : BaseMgmt where T : BaseDataBuildingUnit, new()
{
protected ActionHolder lastAction;
protected BaseDataBuildingUnitMgmt(string id) : base(id) { }
protected BaseDataBuildingUnitMgmt(string id, string icon) : base(id, icon) { }
protected override void ParseElement(T ele, string header, string data)
{
switch (header)
{
case "modi":
Delimiter(ele.modi, data);
break;
case "action":
ActionParse(ele, data);
break;
case "actionreq":
lastAction.req.Add(data);
break;
case "actiondata":
var d = SplitHelper.Delimiter(data);
lastAction.data.Add(d.key,d.value);
break;
case "buildtime":
ele.buildTime = Int(data);
break;
case "cost":
Res(ele.cost,data);
break;
case "visible":
ele.visibilityRange = Int(data);
break;
case "hp":
ele.hp = Int(data);
break;
case "ap":
ele.ap = Int(data);
break;
case "atk":
ele.atk = Int(data);
break;
case "def":
ele.def = Int(data);
break;
case "dammin":
ele.damMin = Int(data);
break;
case "dammax":
ele.damMax = Int(data);
break;
default:
base.ParseElement(ele, header, data);
break;
}
}
protected void ActionParse(T ele, string data)
{
var a = SplitHelper.Delimiter(data);
lastAction = LClass.s.GetNewAction(a.key).Create(a.value);
ele.action.Add(lastAction);
}
}
} | c# | 17 | 0.463579 | 105 | 31.818182 | 77 | starcoderdata |
module.exports = {
desc: 'Tells you the current in-game time.',
cooldown: 5,
aliases: ['gametime'],
task(bot, msg) {
function addZero(i) {
if (i < 10) i = '0' + i;
return i;
}
let d = new Date();
bot.createMessage(msg.channel.id, `The current in-game time is **${addZero(d.getUTCHours())}:${addZero(d.getUTCMinutes())}**`);
}
} | javascript | 12 | 0.62766 | 129 | 24.133333 | 15 | starcoderdata |
package com.cadastro.desafio;
import org.junit.jupiter.api.Test;
/**
 * Placeholder test class named after Spring Boot's conventional context-load
 * check. NOTE(review): there is no @SpringBootTest annotation here, so the
 * test currently asserts nothing and loads no context -- confirm intent.
 */
class DesafioApplicationTests {

	@Test
	public void contextLoads() {
	}

}
'use strict';
const { element } = require('protractor');
const pageCommon = require('./common.js'); const common = new pageCommon();
// Protractor page object for the public home page: element locators plus a
// helper that dismisses the overlays blocking interaction.
// (Cleans up the stray double semicolons on two locators.)
class HomePage {
    //elements
    btnAllowNotification() { return element(by.id('onesignal-slidedown-allow-button')); }
    btnClosePopUp() { return element(by.id("getsitecontrol-140168")).element(by.css_sr('::sr button')); }
    btnConfirmCookies() { return element(by.id("getsitecontrol-44266")).element(by.css_sr('::sr span')); }
    btnFiltrarPequisa() { return element(by.xpath('//button[@class="search-input-icon"]')); }
    btnTipoPesquisa(tipoPesquisa) { return element(by.xpath(`//a[text()="${tipoPesquisa}"]`)); }
    viewAlertDialog() { return element(by.id('onesignal-slidedown-dialog')); }
    viewFiltroPesquisa() { return element(by.xpath('//input[@placeholder="Qual curso você está procurando? Ex.: Polícia Militar"]')); }
    viewHomePage() { return element(by.id('b_home')); }

    //functions
    // Dismisses the notification prompt, promo pop-up and cookie banner, in order.
    async closeAlerts() {
        await common.loadingElement(this.viewAlertDialog(), 300);
        await common.clickElement(this.btnAllowNotification(), 300);
        await common.clickElement(this.btnClosePopUp(), 300);
        return await common.clickElement(this.btnConfirmCookies(), 300);
    }
}

module.exports = HomePage;
import psycopg2
from .conf.configuracion import Configuracion
from .extraer.extraer import Extraer
from .cargar.cargar import Cargar
def run():
    """Run the ETL: extract models from the source database and load them
    into the destination database.

    Returns:
        int: 1 on success, 0 on any connection or ETL error.
    """
    cnx = cny = None
    try:
        cnx = psycopg2.connect(user=Configuracion.USER_SOURCE, password=Configuracion.PASSWORD_SOURCE,
                               host='localhost', database=Configuracion.DATABASE_SOURCE)
        cny = psycopg2.connect(user=Configuracion.USER_DESTINATION, password=Configuracion.PASSWORD_DESTINATION,
                               host='localhost', database=Configuracion.DATABASE_DESTINATION)
        cursor1 = cnx.cursor()
        cursor2 = cny.cursor()
        try:
            # Move every configured table's models from source to destination.
            Cargar.cargarModelos(Extraer.obtenerModelos(Configuracion.getTables(), cursor1), cursor2)
            cny.commit()
        finally:
            cursor1.close()
            cursor2.close()
        print("=============================================================="
              "==================================\n=========================="
              " ETL ejecutado con exito :) ==================================")
        return 1
    except (Exception, psycopg2.Error) as error:
        print('Error', error)
        return 0
    finally:
        # Always release connections, even on failure (the original leaked
        # them when connect() or the load raised).
        for conn in (cnx, cny):
            if conn is not None:
                conn.close()
// Raises the Windows timer resolution as far as the OS allows so that
// Sleep()/timer waits get fine-grained timeslices.  Saves the previous
// low-level period in win_timer_old_period so it can be restored later
// (-1 is used as the "nothing to restore" sentinel below).
static void set_lowest_possible_win_timer_resolution()
{
    // First crank up the multimedia timer resolution to its max
    // this gives the system much finer timeslices (usually 1-2ms)
    win_timer_result = timeGetDevCaps(&win_timer_caps, sizeof(win_timer_caps));
    if (win_timer_result == TIMERR_NOERROR)
        timeBeginPeriod(win_timer_caps.wPeriodMin);

    // Then try the even finer sliced (usually 0.5ms) low level variant
    // (undocumented NtDll API, resolved at runtime; periods in 100ns units).
    hNtDll = LoadLibrary("NtDll.dll");
    if (hNtDll) {
        NtQueryTimerResolution = (NTQUERYTIMERRESOLUTION)GetProcAddress(hNtDll, "NtQueryTimerResolution");
        NtSetTimerResolution = (NTSETTIMERRESOLUTION)GetProcAddress(hNtDll, "NtSetTimerResolution");
        if (NtQueryTimerResolution && NtSetTimerResolution) {
            ULONG min_period, tmp;
            NtQueryTimerResolution(&tmp, &min_period, &win_timer_old_period);
            if (min_period < 4500) // just to not screw around too much with the time (i.e. potential timer improvements in future HW/OSs), limit timer period to 0.45ms (picked 0.45 here instead of 0.5 as apparently some current setups can feature values just slightly below 0.5, so just leave them at this native rate then)
                min_period = 5000;
            if (min_period < 10000) // only set this if smaller 1ms, cause otherwise timeBeginPeriod already did the job
                NtSetTimerResolution(min_period, TRUE, &tmp);
            else
                win_timer_old_period = -1;
        }
    }
}
package com.sosog.common.enums;
/**
 * Data source type used to route queries between the primary and replica
 * databases.
 * 
 * @author web
 */
public enum DataSourceType
{
    /**
     * Primary (master) database.
     */
    MASTER,

    /**
     * Replica (slave) database.
     */
    SLAVE
}
<div class="container">
<div class="row">
<div class="col-md-6 col-sm-12">
href="#"><i class="fa fa-facebook">
href="#"><i class="fa fa-twitter">
href="#"><i class="fa fa-google-plus">
href="#"><i class="fa fa-envelope">
<div class="col-md-6 col-sm-12">
href="https://rubik-technologies.com/">Azharku Media
<script type="text/javascript">
window.odometerOptions = {
format: '(,ddd)',
};
<script src="<?= base_url() ?>assets/bootstrap-4/js/jquery-3.4.1.min.js">
<script src="<?= base_url() ?>assets/web/js/vendor/jquery-3.1.0.min.js">
<script src="<?= base_url() ?>assets/web/js/vendor/jquery.easing.min.js">
<script src="<?= base_url() ?>assets/web/js/vendor/tether.js">
<script src="<?= base_url() ?>assets/web/js/vendor/bootstrap.js">
<script src="<?= base_url() ?>assets/web/js/vendor/slick.js">
<script src="<?= base_url() ?>assets/web/js/vendor/isotope.pkgd.min.js">
<script src="<?= base_url() ?>assets/web/js/vendor/odometer.min.js">
<script src="<?= base_url() ?>assets/web/js/main.js">
<script src="<?= base_url() ?>assets/vendors/sweetalert/dist/sweetalert2.all.min.js">
<script src="<?= base_url() ?>assets/vendors/sweetalert/js/script.js">
$(document).ready(function() {
    // Bootstrap custom file inputs keep a static placeholder label; show the
    // chosen file's basename (strip the fake "C:\fakepath\" prefix) instead.
    $('.custom-file-input').on('change', function() {
        let fileName = $(this).val().split('\\').pop();
        $(this).next('.custom-file-label').addClass("selected").html(fileName);
    });
});
// FHIRPath member access: verifies that a plain field (`period`), a camelCase
// field (`statusHistory` -> status_history) and a json_name alias (`class` ->
// class_value) each resolve to the corresponding proto submessage.
TYPED_TEST(FhirPathTest, TestFieldExists) {
  auto test_encounter = ValidEncounter<typename TypeParam::Encounter>();
  test_encounter.mutable_class_value()->mutable_display()->set_value("foo");

  EvaluationResult root_result =
      TestFixture::Evaluate(test_encounter, "period").value();
  EXPECT_THAT(
      root_result.GetMessages(),
      UnorderedElementsAreArray({EqualsProto(test_encounter.period())}));

  // Tests the conversion from camelCase to snake_case
  EvaluationResult camel_case_result =
      TestFixture::Evaluate(test_encounter, "statusHistory").value();
  EXPECT_THAT(camel_case_result.GetMessages(),
              UnorderedElementsAreArray(
                  {EqualsProto(test_encounter.status_history(0))}));

  // Test that the json_name field annotation is used when searching for a
  // field.
  EvaluationResult json_name_alias_result =
      TestFixture::Evaluate(test_encounter, "class").value();
  EXPECT_THAT(
      json_name_alias_result.GetMessages(),
      UnorderedElementsAreArray({EqualsProto(test_encounter.class_value())}));
}
######################################################################
##
## Events
##
######################################################################
class Events:
    """Container of lazily created, named event slots.

    Subclasses may declare ``__events__ = ('OnFoo', ...)`` to whitelist the
    allowed event names; accessing an undeclared name then trips the assert.
    """
    def __getattr__(self, name):
        # Only reached when `name` is not yet in __dict__: validate it (if a
        # whitelist was declared) and lazily create the slot.
        if hasattr(self.__class__, '__events__'):
            assert name in self.__class__.__events__, \
                "Event '%s' is not declared" % name
        self.__dict__[name] = ev = _EventSlot(name)
        return ev
    def __repr__(self): return 'Events' + str(list(self))
    __str__ = __repr__
    # NOTE(review): returning NotImplemented from __len__ makes len(obj) raise
    # a TypeError rather than report a size -- confirm this is intended.
    def __len__(self): return NotImplemented
    def __iter__(self):
        # Yield every _EventSlot currently stored on the instance.
        def gen(dictitems=self.__dict__.items()):
            for attr, val in dictitems:
                if isinstance(val, _EventSlot):
                    yield val
        return gen()
class _EventSlot:
def __init__(self, name):
self.targets = []
self.__name__ = name
def __repr__(self):
return 'event ' + self.__name__
def __call__(self, *a, **kw):
for f in self.targets: f(*a, **kw)
def __iadd__(self, f):
self.targets.append(f)
return self
def __isub__(self, f):
while f in self.targets: self.targets.remove(f)
return self
######################################################################
##
## Demo
##
######################################################################
if __name__ == '__main__':

    class MyEvents(Events):
        __events__ = ('OnChange', )

    class ValueModel(object):
        """Holds a value and fires OnChange whenever it is replaced."""
        def __init__(self):
            self.events = MyEvents()
            self.__value = None
        def __set(self, value):
            if (self.__value == value): return
            self.__value = value
            self.events.OnChange()
            ##self.events.OnChange2() # would fail
        def __get(self):
            return self.__value
        Value = property(__get, __set, None, 'The actual value')

    class SillyView(object):
        """Observer that prints the model's value on every change."""
        def __init__(self, model):
            self.model = model
            model.events.OnChange += self.DisplayValue
            ##model.events.OnChange2 += self.DisplayValue # would raise exception
        def DisplayValue(self):
            # print() form works under both Python 2 and Python 3
            # (the original bare print statement is Python 2 only).
            print(self.model.Value)

    model = ValueModel()
    view = SillyView(model)

    print('\n--- Events Demo ---')
    # Events in action
    for i in range(5):
        model.Value = 2*i + 1
    # Events introspection
    print(model.events)
    for event in model.events:
        print(event)
//Secant Method
#include
#include
#define f(x) ((x)*sin(x) + cos(x)) //((x*x)- x - 6) //(pow(E,x) - (2*x) - 1)
#define df(x) ((x)*cos(x)) //(2*x - 1) //(pow(E,x) - 2)
#define E 2.718281828
#define EPSILON 0.0000005
/*
 * Secant-method root finder for f(x) = x*sin(x) + cos(x).
 * Reads two starting guesses x0, x1 from stdin (re-prompting until the
 * bracketing condition f(x0)*f(x1) <= 0 holds), iterates until
 * |x1 - x0| <= EPSILON, logs every step to "secant.txt"/"secantP1.txt" for
 * plotting, and prints the root.  Returns 0 on success, 1 when the output
 * files cannot be opened or stdin input is invalid (the original leaked
 * both FILE handles and never checked fopen/scanf).
 */
int main(void){
    FILE *fp1 = fopen("secant.txt", "w"), *fp2 = fopen("secantP1.txt", "w");
    float x0, x1, x2, order, e0, e1 = E;
    int count = 0;

    if (fp1 == NULL || fp2 == NULL) {
        printf("Could not open output files.\n");
        if (fp1) fclose(fp1);
        if (fp2) fclose(fp2);
        return 1;
    }

    printf("Enter the values of x0, and x1\n");
    if (scanf("%f %f", &x0, &x1) != 2) { fclose(fp1); fclose(fp2); return 1; }
    while(f(x0)*f(x1) > 0){
        printf("Please enter the values of x0 and x1 again.\n");
        if (scanf("%f %f", &x0, &x1) != 2) { fclose(fp1); fclose(fp2); return 1; }
    }

    printf("------------------------------------------------------------------------------------------------------------------------\n");
    printf("Iteration.No.\tx0\t\tf(x0)\t\tx1\t\tf(x1)\t\tx2\t\tAbs. Error.\tOrder of Convergence\n");
    printf("------------------------------------------------------------------------------------------------------------------------\n");

    do{
        /* Secant update: intersection of the chord through (x0,f0),(x1,f1). */
        x2 = (x0*f(x1) - x1*f(x0))/(f(x1) - f(x0));
        e0 = fabs(x1-x0);
        e1 = fabs(x2-x1);
        /* Empirical order of convergence estimate: log(e1)/log(e0). */
        order = logf(e1)/logf(e0);
        printf("%d \t\t%f \t%f \t%f \t%f \t%f \t%f \t%f\n", count, x0, f(x0), x1, f(x1), x2, e1, order );
        fprintf(fp1,"%f %f\n%f %f\n\n",x0, f(x0), x1, f(x1));
        fprintf(fp2,"%f %f\n%f 0\n\n",x0, f(x0), x0);
        fprintf(fp2,"%f %f\n%f 0\n\n",x1, f(x1), x1);
        fprintf(fp2,"%f %f\n%f 0\n\n",x2, f(x2), x2);
        count++;
        x0 = x1;
        x1 = x2;
    } while(fabs(x1-x0) > EPSILON);

    printf("\nRoot : %f\n", x1);
    fclose(fp1);
    fclose(fp2);
    return 0;
}
using ENBManager.Configuration.Models;
namespace ENBManager.Configuration.Interfaces
{
public interface IConfigurationManager where T : BaseSettings
{
///
/// The current in-memory settings.
///
T Settings { get; }
///
/// Loads the settings stored from the JSON file.
///
///
void LoadSettings();
///
/// Saves the current settings to the JSON file.
///
/// <param name="settings">
void SaveSettings();
///
/// Initializes the current settings if the JSON file does not exists.
///
void InitializeSettings();
///
/// Sets the read-only attribute of the JSON file.
///
/// <param name="readOnly">
void SetReadOnly(bool readOnly);
}
} | c# | 8 | 0.554082 | 78 | 27 | 35 | starcoderdata |
def filter_ooi_platforms(self, input):
        '''
        Interim helper that reformats raw query output rows into the
        dict format the OOI UI expects.
        '''
        result = {}
        for record in input:
            self.read_row(result, record)
        return result
import numpy as np
from keras import backend as K
from keras.layers import Layer
import tensorflow as tf
def zero_loss(y_true, y_pred):
    # Dummy Keras loss: ignores y_true and simply averages the model output,
    # so the per-sample distances emitted by CenterLossLayer become the loss.
    return K.mean(y_pred, axis=0)
class CenterLossLayer(Layer):
    """Keras layer implementing the center loss.

    Keeps one feature-space center per class as a non-trainable weight and
    outputs each sample's squared distance to its class center; the centers
    are nudged toward the batch statistics with step size ``alpha`` on every
    forward call.
    """
    def __init__(self, alpha=0.5, num_classes=10, num_features=256, **kwargs):
        super().__init__(**kwargs)
        self.alpha = alpha
        self.num_classes = num_classes
        self.num_features = num_features

    def build(self, input_shape):
        # Class centers, updated manually in call() rather than by gradients.
        self.centers = self.add_weight(name='centers',
                                       shape=(self.num_classes, self.num_features),
                                       initializer='uniform',
                                       trainable=False)
        # self.counter = self.add_weight(name='counter',
        #                                shape=(1,),
        #                                initializer='zeros',
        #                                trainable=False)  # just for debugging
        super().build(input_shape)

    def call(self, x, mask=None):
        # x[0] is Nxn_features, x[1] is Nxn_classes onehot, self.centers is n_classesxn_features
        # Sum of (center - feature) per class, then average by class count.
        delta_centers = K.dot(K.transpose(x[1]), (K.dot(x[1], self.centers) - x[0]))  # 10x2
        center_counts = K.sum(K.transpose(x[1]), axis=1, keepdims=True) + 1  # 10x1
        delta_centers /= center_counts
        new_centers = self.centers - self.alpha * delta_centers
        self.add_update((self.centers, new_centers), x)

        # self.add_update((self.counter, self.counter + 1), x)

        # Squared distance of each sample to its class center (Nx1).
        self.result = x[0] - K.dot(x[1], self.centers)
        self.result = K.sum(self.result ** 2, axis=1, keepdims=True) #/ K.dot(x[1], center_counts)
        return self.result # Nx1

    def compute_output_shape(self, input_shape):
        return K.int_shape(self.result)
#include
namespace torchtext {
typedef std::vector StringList;
typedef ska_ordered::order_preserving_flat_hash_map<std::string, torch::Tensor>
VectorsMap;
typedef ska_ordered::order_preserving_flat_hash_map<std::string, int64_t>
IndexMap;
typedef std::tuple<std::string, std::vector std::vector
std::vector
VectorsStates;
struct Vectors : torch::CustomClassHolder {
public:
const std::string version_str_ = "0.0.1";
IndexMap stoi_;
VectorsMap stovec_;
torch::Tensor vectors_;
torch::Tensor unk_tensor_;
explicit Vectors(const IndexMap &stoi, const torch::Tensor vectors,
const torch::Tensor &unk_tensor);
explicit Vectors(const std::vector &tokens,
const std::vector &indices,
const torch::Tensor &vectors,
const torch::Tensor &unk_tensor);
std::unordered_map<std::string, int64_t> get_stoi();
torch::Tensor __getitem__(const std::string &token);
torch::Tensor lookup_vectors(const std::vector &tokens);
void __setitem__(const std::string &token, const torch::Tensor &vector);
int64_t __len__();
};
// Rebuilds a Vectors object from its serialized states.
// (Stripped template arguments restored on the c10 wrappers.)
c10::intrusive_ptr<Vectors> _get_vectors_from_states(VectorsStates states);
// Extracts the serializable states of a Vectors object.
VectorsStates _set_vectors_states(const c10::intrusive_ptr<Vectors> &self);
// Reads token/vector lines from file_path (split on delimiter_str, parsed on
// num_cpus threads) and returns the Vectors plus a list of tokens --
// presumably the duplicates encountered; confirm against upstream.
std::tuple<Vectors, std::vector<std::string>> _load_token_and_vectors_from_file(
    const std::string &file_path, const std::string delimiter_str,
    const int64_t num_cpus, c10::optional<torch::Tensor> opt_unk_tensor);
} // namespace torchtext | c | 11 | 0.682868 | 80 | 38.190476 | 42 | starcoderdata |
// Decrements the open-handle counters for the given mode and disposes the
// shared resource once no readers or writers remain.  All counter updates
// are serialized by locking on internalTS.
internal void NotifyDisposed(OpenMode mode)
        {
            lock (internalTS)
            {
                switch (mode)
                {
                    case OpenMode.Read:
                        readCount--;
                        break;

                    case OpenMode.ReadWrite:
                        // A read-write handle held both a reader and a writer slot.
                        readCount--;
                        writeCount--;
                        break;

                    case OpenMode.Write:
                        writeCount--;
                        break;
                }

                // Make sure we dispose it, it can be ignored.
                if (writeCount == 0 && readCount == 0)
                {
                    internalTS.Dispose();
                }
            }
        }
// Accumulates, over all control points of this record, the travel-time
// difference between the tomography-perturbed velocity model and PREM,
// storing the total in dt_tomo_correction.
void new_record::calculate_tomo_correction(new_tomo* my_tomo)
{
    this->dt_tomo_correction = 0;

    for (int cp = 0; cp < this->CP_num; cp++)
    {
        const int idep = this->CP_idep[cp];
        const int ilat = this->CP_ilat[cp];
        const int ilon = this->CP_ilon[cp];

        // Perturb the PREM velocity by the tomographic anomaly (dvs in percent).
        const double dvs_tomo = my_tomo->my_cell[idep][ilat][ilon].dvs;
        const double velocity_tmp = this->CP_v_PREM[cp] * (1 + dvs_tomo / 100);

        // Extra travel time along this path segment relative to PREM.
        this->dt_tomo_correction += (this->CP_dl[cp] / velocity_tmp)
            - (this->CP_dl[cp] / this->CP_v_PREM[cp]);

        if (this->CP_v_PREM[cp] == 0)
            cout << "idep ilat ilon " << idep << " " << ilat << " " << ilon << endl;
    }
}
import torch
import torch.nn as nn
import time
from causal_transformer_decoder import (
CausalTransformerDecoder,
CausalTransformerDecoderLayer,
)
from tqdm import tqdm
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--type_generation", default="short", type=str,
)
parser.add_argument("--vocab_size", default=30000, type=int)
parser.add_argument("--bsz", default=8, type=int)
parser.add_argument("--input_len_long_gen", default=500, type=int)
args = parser.parse_args()
hdim = 512
nhead = 8
dim_feedforward = hdim * 4
num_layers = 6
device = "cuda" if torch.cuda.is_available() else "cpu"
print(f"Device used: {device}")
bsz = args.bsz
vocab_size = args.vocab_size
if args.type_generation == "short":
input_lens = [10, 25, 50, 100, 200, 300, 400, 500]
output_lens = input_lens
n_experiments = [10, 10, 5, 1, 1, 1, 1, 1]
elif args.type_generation == "long":
output_lens = [500, 1000, 1500, 2000]
input_lens = [args.input_len_long_gen] * len(output_lens)
n_experiments = [1] * len(output_lens)
def generate_square_subsequent_mask(sz: int, device: str = "cpu") -> torch.Tensor:
    """Build an additive causal attention mask of shape (sz, sz).

    Position (i, j) is 0.0 when j <= i (attending to the past/self is
    allowed) and -inf when j > i (future positions are blocked), so the
    mask can be added directly to attention scores before softmax.

    Args:
        sz: target sequence length.
        device: device the mask is created on.

    Returns:
        A float tensor of shape (sz, sz) with 0.0 / -inf entries.
    """
    # Single triu call on the target device replaces the original
    # bool-mask + transpose + two masked_fill passes + CPU->device copy,
    # producing an identical tensor with less work.
    return torch.triu(
        torch.full((sz, sz), float("-inf"), device=device), diagonal=1
    )
# Naive way to use transformers
# Baseline 1: full nn.Transformer — the encoder re-runs on every decoding step.
transformer = nn.Transformer(
    d_model=hdim,
    nhead=nhead,
    num_encoder_layers=num_layers,
    num_decoder_layers=num_layers,
    dim_feedforward=dim_feedforward,
).to(device=device)
transformer.eval()

# Decoupling encoder and decoder
# Baseline 2: run the encoder once per sequence, then loop only over the decoder.
encoder = nn.TransformerEncoder(
    nn.TransformerEncoderLayer(
        d_model=hdim, nhead=nhead, dim_feedforward=dim_feedforward
    ),
    num_layers=num_layers,
).to(device=device)
encoder.eval()

# Re-seeding with 42 before building each decoder is intended to give
# `decoder` and `causal_decoder` matching initial weights — TODO confirm
# the two classes create parameters in the same order.
torch.manual_seed(42)
decoder = nn.TransformerDecoder(
    nn.TransformerDecoderLayer(
        d_model=hdim, nhead=nhead, dim_feedforward=dim_feedforward
    ),
    num_layers=num_layers,
).to(device=device)
decoder.eval()

# Causal Decoder
torch.manual_seed(42)
causal_decoder = CausalTransformerDecoder(
    CausalTransformerDecoderLayer(
        d_model=hdim, nhead=nhead, dim_feedforward=dim_feedforward,
    ),
    num_layers=num_layers,
).to(device=device)
causal_decoder.eval()

# Output projection and token embedding shared by all three models, so the
# comparison measures only the transformer part.
to_vocab = nn.Linear(hdim, vocab_size).to(device=device)
to_vocab.eval()
embedding = nn.Embedding(vocab_size, hdim).to(device=device)
embedding.eval()

# One list of per-run wall-clock timings per (input_len, output_len) setting.
time_exp_regular_transf = [[] for _ in range(len(input_lens))]
time_exp_enc_dec = [[] for _ in range(len(input_lens))]
time_exp_causal_end_dec = [[] for _ in range(len(input_lens))]
# Greedy autoregressive decoding benchmark: for each (input_len, output_len)
# setting, time the same generation loop under the three model variants.
with torch.no_grad():
    for len_index, (input_len, output_len) in tqdm(
        enumerate(zip(input_lens, output_lens))
    ):
        for _ in range(n_experiments[len_index]):
            # Random "source" of shape (seq_len, batch, hdim) and a single
            # start token (id 0) per batch element.
            src = torch.rand(input_len, bsz, hdim).to(device=device)
            first_token = torch.zeros((1, bsz)).long().to(device=device)

            # Inference loops for the three models

            # --- Variant 1: full nn.Transformer (encoder re-runs every step) ---
            t = time.time()
            decoded_tokens = first_token
            for i in range(output_len):
                mask_dec = generate_square_subsequent_mask(
                    i + 1, device=first_token.device
                )  # create mask for autoregressive decoding
                decoded_embeddings = embedding(decoded_tokens)
                output = transformer(src, decoded_embeddings, tgt_mask=mask_dec)
                logits = to_vocab(output)  # projection to vocab size
                # keep most likely tokens
                top_indices = torch.argmax(logits, dim=-1)
                # we only care about the last token that was decoded
                top_indices_last_token = top_indices[-1:]
                # add most likely token to the already decoded tokens
                decoded_tokens = torch.cat(
                    [decoded_tokens, top_indices_last_token], dim=0
                )
            time_exp_regular_transf[len_index].append(time.time() - t)

            # --- Variant 2: encoder once, plain TransformerDecoder each step ---
            t = time.time()
            decoded_tokens = first_token
            src_embeddings = encoder(src)
            for i in range(output_len):
                mask_dec = generate_square_subsequent_mask(
                    i + 1, device=first_token.device
                )  # create mask for autoregressive decoding
                decoded_embeddings = embedding(decoded_tokens)
                output = decoder(decoded_embeddings, src_embeddings, tgt_mask=mask_dec)
                logits = to_vocab(output)  # projection to vocab size
                # keep most likely tokens
                top_indices = torch.argmax(logits, dim=-1)
                # we only care about the last token that was decoded
                top_indices_last_token = top_indices[-1:]
                # add most likely token to the already decoded tokens
                decoded_tokens = torch.cat(
                    [decoded_tokens, top_indices_last_token], dim=0
                )
            time_exp_enc_dec[len_index].append(time.time() - t)
            logits_enc_dec = logits

            # --- Variant 3: CausalTransformerDecoder with incremental cache ---
            # No explicit mask here: `cache` is threaded through the calls,
            # presumably carrying previous steps' intermediate states so only
            # the new token needs processing — see the library's docs.
            t = time.time()
            decoded_tokens = first_token
            src_embeddings = encoder(src)
            cache = None
            for i in range(output_len):
                decoded_embeddings = embedding(decoded_tokens)
                output, cache = causal_decoder(
                    decoded_embeddings, src_embeddings, cache
                )
                logits = to_vocab(output)  # projection to vocab size
                # keep most likely tokens
                top_indices = torch.argmax(logits, dim=-1)
                # we only care about the last token that was decoded
                top_indices_last_token = top_indices[-1:]
                # add most likely token to the already decoded tokens
                decoded_tokens = torch.cat(
                    [decoded_tokens, top_indices_last_token], dim=0
                )
            time_exp_causal_end_dec[len_index].append(time.time() - t)
            # Final-step logits kept from variants 2 and 3 — presumably for an
            # equality sanity check; unused in the visible code (NOTE(review):
            # confirm before deleting).
            logits_causal = logits
def _avg(samples):
    # Mean of the recorded wall-clock times for one experiment setting.
    return sum(samples) / len(samples)


# Collapse each per-run timing list into a single average per setting.
time_exp_regular_transf = [_avg(runs) for runs in time_exp_regular_transf]
time_exp_enc_dec = [_avg(runs) for runs in time_exp_enc_dec]
time_exp_causal_end_dec = [_avg(runs) for runs in time_exp_causal_end_dec]

# CSV-style summary: one header line, then one row per experiment setting.
print(
    "Bsz, hdim, Vocab size, Len input, Len output,"
    " Regular Transf, Enc/Dec, Causal Enc/Dec"
)
rows = zip(
    input_lens,
    output_lens,
    time_exp_regular_transf,
    time_exp_enc_dec,
    time_exp_causal_end_dec,
)
for input_len, output_len, time_transf, time_enc_dec, time_causal in rows:
    print(
        f"{bsz}, {hdim}, {vocab_size}, {input_len}, {output_len}, "
        f"{time_transf:.4}, {time_enc_dec:.4}, {time_causal:.4}"
    )
package com.mewen.chloride.features;
import com.google.gson.JsonElement;
import com.mewen.chloride.AChlorideFeature;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.fabricmc.fabric.api.client.keybinding.v1.KeyBindingHelper;
import net.minecraft.client.MinecraftClient;
import net.minecraft.client.options.DoubleOption;
import net.minecraft.client.options.KeyBinding;
import net.minecraft.client.options.Option;
import net.minecraft.client.util.InputUtil;
import org.lwjgl.glfw.GLFW;
@Environment(EnvType.CLIENT)
public class ChlorideZoom extends AChlorideFeature
{
private ZoomConfiguration zoomConfiguration = new ZoomConfiguration();
private static final MinecraftClient mc = MinecraftClient.getInstance();
private Boolean currentlyZoomed = false;
private Boolean originalSmoothCameraEnabled = false;
private final KeyBinding key = new KeyBinding("key.chloride.zoom", InputUtil.Type.KEYSYM, GLFW.GLFW_KEY_Z, "category.chloride");
public ChlorideZoom()
{
super(new ZoomGUIConfiguration("option.chloride.category.zoom"));
}
@Override
public void OnRegistration()
{
KeyBindingHelper.registerKeyBinding(key);
ZoomGUIConfiguration fogConfigurationController = (ZoomGUIConfiguration) GetGUI();
fogConfigurationController.SetZoomController(this);
}
@Override
public JsonElement SerializeConfiguration()
{
return gson.toJsonTree(zoomConfiguration);
}
@Override
public void DeserializeConfiguration(JsonElement json)
{
zoomConfiguration = gson.fromJson(json, ZoomConfiguration.class);
}
public double GetZoomFOV() {return zoomConfiguration.zoomFOV;}
public void SetZoomFOV(double value) {zoomConfiguration.zoomFOV = value;}
public boolean IsZooming() {
return key.isPressed();
}
public void ManageSmoothCamera()
{
boolean isZooming = key.isPressed();
if (isZooming && !currentlyZoomed)
{
originalSmoothCameraEnabled = mc.options.smoothCameraEnabled;
currentlyZoomed = true;
mc.options.smoothCameraEnabled = true;
}
else if (!isZooming && currentlyZoomed)
{
currentlyZoomed = false;
mc.options.smoothCameraEnabled = originalSmoothCameraEnabled;
}
}
@Environment(EnvType.CLIENT)
public static class ZoomConfiguration
{
public double zoomFOV = 19.0;
}
@Environment(EnvType.CLIENT)
public static class ZoomGUIConfiguration extends AGUIConfiguration
{
private ChlorideZoom zoom;
public ZoomGUIConfiguration(String categoryKey)
{
super(categoryKey);
}
private void SetZoomController(ChlorideZoom zoom)
{
this.zoom = zoom;
}
@Override
public Option[] GetOptions()
{
DoubleOption ZOOM = new DoubleOption("option.chloride.category.zoom.name", 5.0D, 50.0D, 1.0F,
(gameOptions) -> zoom.GetZoomFOV(),
(gameOptions, double_) -> zoom.SetZoomFOV(double_),
(gameOptions, doubleOption) -> doubleOption.getDisplayPrefix().append(Integer.toString((int) zoom.GetZoomFOV())));
return new Option[]{ZOOM};
}
}
} | java | 19 | 0.682511 | 134 | 31.475728 | 103 | starcoderdata |
#include <cstdio>
#include <algorithm>
using namespace std;
const int MAX_N = 3005;
const int MAX_BN = 200005;
const int MOD = 1000000007;
typedef long long ll;
// Globals shared with main(): grid size, blocked-cell count, DP table,
// and precomputed factorials / inverse factorials mod MOD.
int h,w,n,dp[MAX_N];
ll fac[MAX_BN],faci[MAX_BN];
struct p {
    int r,c;
} b[MAX_N];

// Modular exponentiation: a^b mod MOD by iterative binary squaring.
ll fpow(ll a, ll b)
{
    ll result = 1;
    while (b > 0)
    {
        if (b & 1)
            result = (result * a) % MOD;
        a = (a * a) % MOD;
        b >>= 1;
    }
    return result;
}

// Binomial coefficient C(n, k) mod MOD via precomputed factorial tables.
int bnk(int n, int k)
{
    ll inv = (faci[k] * faci[n - k]) % MOD;
    return (int)((fac[n] * inv) % MOD);
}
// Counts monotone lattice paths (right/down moves) from (1,1) to (h,w) that
// avoid n blocked cells, modulo 1e9+7, by inclusion-exclusion over the
// blocked cells sorted in path order.
int main()
{
    // Precompute factorials and inverse factorials mod MOD so each binomial
    // coefficient is O(1).
    fac[0]=faci[0]=1;
    for (int i=1; i<MAX_BN; i++)
    {
        fac[i]=(fac[i-1]*i)%MOD;
        faci[i]=fpow(fac[i],MOD-2);
    }
    // Input: grid size h x w, then n blocked cells as 1-indexed (row, col).
    scanf("%d %d %d",&h,&w,&n);
    for (int i=1; i<=n; i++)
        scanf("%d %d",&b[i].r,&b[i].c);
    // Sort by row, then column, so any cell reachable before b[j] on a path
    // appears earlier in the array.
    sort(b+1,b+1+n,[](const p &a, const p &b) {
        if (a.r==b.r)
            return a.c<b.c;
        return a.r<b.r;
    });
    // Append the destination as a pseudo-blocked cell: dp[n] then counts paths
    // whose FIRST touched "blocked" cell is (h,w) itself, i.e. valid paths.
    n++;
    b[n].r=h,b[n].c=w;
    // dp[i] starts as the unrestricted path count from (1,1) to b[i]:
    // C(r+c-2, r-1).
    for (int i=1; i<=n; i++)
        dp[i]=bnk(b[i].r+b[i].c-2,b[i].r-1);
    // Subtract paths whose first blocked cell is some earlier b[i] that lies
    // weakly up-left of b[j]: dp[i] ways to reach i cleanly, times the free
    // path count from b[i] to b[j].
    for (int i=1; i<=n; i++)
        for (int j=i+1; j<=n; j++)
            if (b[i].r<=b[j].r&&b[i].c<=b[j].c)
                dp[j]=(dp[j]-
                    ((dp[i]*1LL*bnk(b[j].r-b[i].r+b[j].c-b[i].c,
                    b[j].r-b[i].r))%MOD)+MOD)%MOD;
    printf("%d\n",dp[n]);
    return 0;
}
| c++ | 24 | 0.5 | 49 | 16.766667 | 60 | codenet |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.